var/home/core/zuul-output/
var/home/core/zuul-output/logs/
var/home/core/zuul-output/logs/kubelet.log.gz
[binary tar/gzip payload of kubelet.log.gz omitted — compressed data, not recoverable as text]
N˱`U9bTFJl"[PG/(G-!rbDfEʽD뢲κHsDh?j.?Qq[.MCKr{y7x'[\{TroIy:vpm /^W=uƵSSxw'wvmZw'PkSokrK7\-Kﮞs<}ŬqlrɆK6պ^7|q Gk%wjmotSfy4[zs3/loс#Fji.,rv/i뢝m.lwwƽu˭_6W(աm&cT|ڌ?}:3dLv`mMl\ẘTMiK(u6GagVRΌZ'BpPIADevJBowmoQGdkGr+w`xa]2ɃιK)˲f!!j : @{`r"@JZ8TrQZU={L?YXo$ґ)0བ6r_>u,uaA<\2,˻~clBgG&֍__Geru9N@:õ*<~z/j6Г1eLOozdŨ?B_[ mA#җ랬;sMx. ɽW+P$,S_|'PtkU&:khװ٪y-&G͋Awq~*ݩ?E2/A˕%i6y\~Y3_?۳~$fۍS4칥Kx0seq"yWU-wv/Lu>mbYēmzʊmq ~%^sKV·"t<xXb$KE&$V(ٓcFC̩lIc !H7x͎~琏~6: 7x;zvUE3撉^'"9%(b8jv<>plcPq`cx}Q10>p yJ!/1ZTȋ~q6F}pfb`Mv(vJR|R`N]&0 c')N u!Gw.'\  ŹXRbx |k;`|=ꋾUVwXp y2WܓgL"zzDGJ͌XN: l{ uq3zT ohSQ+3O}+qWmMSɄfWtb}>UZY)UrS.dG]0J+H\7] @mq{vڳb˚rɅL `,%mP,ʨ( 猷Lk36;mWsG^fq }fv"=:9+#GG׳j>ӎ.Lu1g QTd";E,tW3ȂGHH_ D ˁcr+IB$22KN[ETl]VL)5H:q ՚ )5E)'V\: y%sz8C^+1ؤޟ];{@A^ m畠BAN9iaE+ j9f\2.HALXNGt\zu!SɑP.&)$UY @z.m ^Ot4 &~QuHg 7㖻8n7px?>*_nJP\p[z r+ @!0QRdy>oۣd-o|t`o~ۙf Ů-nG~J;Ԕߤd!3W5fI',Ƙ$1Sx0DNb9, ^ V "Ybį 0\]֚Xt.O$]4>9sFq*wW=Ӯ`yi~>lqCP?|f#|ibleꗪ_PřV(̿x5G[H3ކe{3=&fojL?7+_^M>=O`._0TCL/ߟ~ó|]f'~b%}{a\%3x}4erӟ&-ͻ{]3|_4xiw/ %ߪeS^V13 ~돣Ṻ́jFuz{xGnn%Z5dԃgt7z;~Nǘv4. iu%/bc( 뤔SM).;RIQ(eD9}lh97vnP?#7}v;OAFjƋn rqs,&4RB]\UȚFx4{SOyhm(yld}?㫏02U>p(~=->G:Q塶7L۳Ťq[yk_+rol?LkOGbJĐ[D !b_1+HЉ!5!tH EbȽ־914lik׳ hN9KNiɺ |fYrH_؁C/Ic{/I} 2mYT¿$.TPĵLVVޅ,)wc3u$\t\ࠪ#wٛƒ:QDJ3x-h,KE_>]R.zIVڳ{1SMÊycPrM|aB]Iws2\Ia`W~uUw9և^v Czy]Emvh;pܝ9.ڵŽi%SfB%/x\0/e5zI+M >K-x'-hA]h(?0v>̦{EkFM0ߣ۰4j0.F+R> 20pv驼~ye|ӏtfz^I(W\$)v%D' ^BAkxGDNRs=;=>|;gJ>to}]=9 NE=R\EF1ϋI^eW"Y' &hNLKŵLFtN,8],`9T)J(1iB1IQN\˒kw )k!Hfe^*8N a19' )3HG%5g)6*KQe9eDHؚ,#U^z]pVw'W<$g1޵s]]&{a;)KE.{8uL>AB`KBhHZj̄)BH8jTRB|T &e„dgx˦b1JXx`,yQ? iaIx`5"f( ])Du\X;e ^lʘ]R󶇇Ɋ FeS iel)y-cK@Q> l`aZC yq8-g65$Hեo<ت6JTd cQ`[IobqYzZѐNVϐBt*\Dg!0X0E2m1)Y0ki PXOT0{' J !QhHX m)l ӱ$0WK 抩ɔ l"hB׿1+;-mKd[D/O@B]^j<`O s3x)C1hgAS@|\ !o)l00Ř1MԄÜp JE!LTY}`@gZ9%D6,FQ V03I_~2J.BA3D)Ӄ ` V5լbٻFn,Wc`R6dvg,?$N,+D-9=ږl.Ǫ=ݥ%yy,"< eotK[PSS!!J6rB".];MHI(eN74F^CU *իl@!Ѿ2y-dI-)V]A er%|I v/UB1%dȏEhB>8Win"<38o{ Mini+EI3f)48eLN=H %_i#.ìr [7&.\}-/*^]01Mj mP0hAƳԁvƖ`݆4dM1T.EW7!jZ),A0)Opv16z$_ScBW ҃\WTx Q%$iAU &T HW6xQ8@=K@GTe 2jnuGM "q2YɂNe~D}%2T.wVd}p[:>rָ )h} ML4*Ua+xm a,{إ*:HA/:s 7D>yr /РLFҍm׀R"Pej$e`w PJv%lrJ#2Z] X- ӥ#C] d+]"mrF,zjrtE΃ A;RqA c~6 )HHMV.=H5Z ݑ8dDYU#˵wCNiXlDt9Af ?hB6HeM`kjd00V i-iVЀ,E"޲HmM#-}zEU%PDm5Բ7;Mw3(a-t)a@f QZR05 at;BU3kFٰ뽊3l85f@\b@0u mIGB\z=;TYY:jm35&EZc(jRDr%BV=8Z@ƤQDw-߲SхoHtP"ʥ-NT`=@e A QDe!0"6Q2l ]@[W2 O .!IF aĊP(EiQBQHml,(JFd00TKch\G隨T 9 986+pc #f7S/9VQRM&ɋC(2ed Z vrZW-Dv*4uBh! L]榖hUX[dڀ@nc/;`u- HFH-ll9E+B~ZS+ ܙ#K㔲ܳ vc N}6ֱJXU5. %_JY 6iUnBFRCt ]HtA W5>!uhx;SUB1@_.Яz-n57t<.¤HE:_Њk^1 ׯ7G cc+ƅWzh)\.Akr_MlS6qCk!X fy˘k@PvX]ۖ =[*/Vbv {2zWkZ[G|}*Erf -(Sd!(| p?ZŚvl6lWپNO/fq~}1($lrXnoҔ(e ]UkګoTF;|_g󋘀+":Mky4s»(x (>6ꦠtFe|."_ skDfާ_\Gay KqTJRte=ɲ}݋0nq=YL>(yve^!蠅o0}/STI2B,zTF 2--F =hқDާv߈ &7}9.}۸p);vZ3K;@%cX?$I{O˟/PZK<|3yEGiu؆:<h+/xxn,X!>|F77&&d}du7&+7|xx]D ֲ?gZ];\V2\DkO-ڌú7 -]yA?PE 5Wf (hML'ŭ5ZsY>c ) V#VXw)4G0 Խi2W4Y̱4ڄf~0]jwph}0㺚|\O8#,[7[;~L9o~追Y/#63 [L\3m7pϗ/Kk)E[9Rltttmׯjldʒ;1sU7SETH\>A?/;Zf92 C^ҢGpK!zWd}+ኬ4•R,Įº{î`ኬT̮^$\6£?:?"׃8[:⫓qzg[7Mޏ+n]dHݞsU4eU&HWT6ETIB|zz3yZс=fl 6RZeJm_ 6ӱLe\t_ vq˃+:ʇ\-B_=~vEVjp w, \Z'J^"\yM}<-Ϫ K!{ ;\+D FY-zW0h}naֺOP2\D*(`xF97\SCzEZSii.&Trs .:D*c0 p.MJ[|̪gd`dlP}ZwKٟܝ5!d/ A!zϮקŠM\pu7q9\̊ìe0+?.fzh׷rV#GpE"kMypEV:p>3Kr2QpE*qpEV:pJ;#]#]7 jJ%•qK?`vEAY)^"\Y/#"s \ Y) +絳}2\OBR;\  +)~ '5)M޽ۭdx0ۏ9]'ӍVld/qdڛ)ﳱoqŋޜܵuiio;^zϵmO ڸhvY$ nvàmN\rv{N]45BZu0MV^ aO:~㰻#?kt4O:CN;xXdޘ@_M|bBsuY]ܸ¨G`[Oiօ\yXo޴EGy{jX?Q}\M(Cn=[Mk".I:ݏ^lQ^8_wҾp׏%y-\_mkB5(Co6ҤrԮYCla8\[8Xo[i۵% Ҁ+n}h+Am޵/?/˛4|Y_o_6rQgtY7Ѕ]DeyULTu(EUFo*KDP:W*VZק57ks?PzinyKqwIg W"oĴD_H6D@=pǃ9#R&OރOF#(gz5#y=e<0~ Kswf 8o#l1۟f7n}"Ӣj 5~Xݹ1)o;}__ҞݜSZvaEu4U 3^%]Mg? 
gZCr!^}fd;Т;: x`G J4$;GL~ٞ6tm }-Wux|~9;?;ʟ 'Ko4oD'W󯤘ыmsZ|ъ|㪝rCْt޵6v#ٿb엙(>*@>dz2b: |ۚ-GÒ-ɲ|Ւ nK&9"{ŜyPx|&7?K߀H:mTHzdft}7)4v\e2QPX\dnhD F S@PFHs0맀 k!livG\ɧ~]UxPO !YAkKhr̟Uf* E% JZ/ëgb=,lK>y|4 ȨcMIx˒X4 e-;8*5+h05/{K`eZ{ 7-gKg,aMՒc]ji^]#5u2ݢڍ)̫?$SiTTySRK$j$v5 / mVZ `&ux@x K^lغUb[[q_ x(܊ UfCjF/u Cq9[U::`NEK3W2UhU^YX!OvPu7Os%cs%V:K1r|'+"߾|W?;Ǚ i`t :TQ$@5Jl@'#sx%yg9p3w%1=9 R{Ϊ\T/&U#[l}5{\&4ӿ.:,05(;gdR8OxXvx,;؞; EJJBz3&EtR}(10͒R)4N71#kG/(G:H1 ]d%+[ԼPr| =ݼ{m<'zN+⩫eweW9jL\k:s}XDavk"s]]aKnQtt]qV*J iL+|foz.wEV7&A4/j["czOf+q3[ Gl1[=;[ىl\UȚ: bQwő "iJy0m>" 6a68#LY|5pF lw&d;; .#sx‚׺J2H/arx =nbU"6>TG)6xZJ Qz4ޫIUYC<NuEؘtM DR)9~k1ֆA`0fd:֕1W))׳ )gVNR %*ֵ%W$%>) 6_43Nf3a` )} IJGQE"xK\ *ʁ%.D4*qI^iQA[ǍJ*(F.EDE@(\61ut^F=;jwߔuC[Ի7a@^5??vc+}RS [dM<oѵ1gW{S]v~" xNP"rQڻ JRYE(a5Xtu;9vǿ_n~x,nx}}5ԅte-IJdx&fAE+&:% M_ }>Eۜ3$p!($DXjS5U֯ Sqfk9RL61J MBE1Ycއs<@:OL zlnP'q$Y8Vm)SR`$P6G"cƼpQPH(ҺKL;\)U'_{N'8?ǂE)j=ɓx38WrWfs5zDjX[?,0 MO9?\fOZS{ +=O q=s3)Z+qYͰ-.)c3}6[Axf%!(hqZ#.❸dO3ؼ.'u>:?C<:Pmțt{rZC2%ogo~hcgiڪ79a̩VLҭ]o7V&]AvdO'[w'O~h|ws}6"8[!Vυ_|l2vQSl۳p9sXגwk[uͨLlfqcEt9lm`ź/og=ksp19'wNͭ:u}eqkԆ OGmꃛ1ۯrR=o<NWW當0V_O~e`<ٱz;E>ꊿ%wNe>ɳ?91.湧Ҵֵ gN8<>I3V?߿ן{^*߾w5z: >[?K4_oھijۛ5=lӴߢ]]vyM߮F}gEe Xn3}%8RbclHhig\DsU8 K9ڡj g;30`?FW`yvM(p%ZXLS p9 lYNU 3bXMN6t}svzc9[E .O@{~ 509[QN1`oux+c M|ώ&_#c]DǑAQ}NWYWnnRCA*gA*69T7xJ͇~RJ&Ȇ$& Tt{Lpbt%@AOIqzaS#f:]4PtBZ%8ܿgqex5Z&-n䶾L~kֈY7KϏMmmP@P[{<O ΃4q9lIka ^׉֘#¦I'9*uEȨB{g̔@JA_3? E(1i ޣ69ZtJ zLdLF"$Nii#cEL> fİFGɎLJQILJ'gw/Ft! GnI/}jȋD*i3XõބS*LezWXb"sR7.w@p:Hi 6R]["QgX3[bo j r2_TdfH[ߎtdO 4||0[lu ȶIE)E,$QgP2$HSs`Ɣ6.UtmR-uGT^Jh% Bv +Ǣ ЖDE$;؝Ŏ˼@1jw6;jv`7: SRɚTL^fK d lXD!mTG1օ fIc[?YE'"\Lf=pgG?(vG??]6 c,;:zBŦ` xihMixgq`=StNg\3ӟT%/2:˒#,߯jْeeKc["o* 5A)'g) ;G b%dFzi` GNS>ͷ^Gfαw;Njɫ["|Z<Ҩѧ> W<$'$[.!^&oY8-pU[-S]^$M }Hmg !Oּosֹu/7AFub#Z;>o+] P_ecXֆۏH~t|m4}wvx4;F}q"yW::}ݶmK=O=@mC告6ٶ¦gϝ~K,D&bBwKm/?##q$IH<Pb%NG ":rbIsD0?~NDm}vw<VK"̠NY``3IXJ9Q@TqU@3ЧzgWNm(us=Vp{R0B yF!/0yZX뫡Z l)'-'0h NZpjj#\+K݄繿̾تvb+Y !z1@uTs(+4˥H˷?%} r +O& d{{ګA_z=7ڙRg4q\d u j!u2+#0xRΌEH =9^Osf}{]:U0[멏-Ew}#Rq1(\'?A;dI_R+WZk!.j[;)@hӢS\ h딺,I#Xdu1d=NN 60Q@ Zi7+ur)mZJ0;cx'-T!2;k_l UzMዓMwk` u:Uƅ&{M8 77ILL ;z#vjϴCT3p=P od %va%IO0עD)y5JD+C@f$#!ѧp kmADhA@7o(eHpMT\gSG&so[Wo%z2+4% ߽gtr\looyVW'I8cgDP#v!'k??M~GjMEvZR 3ۚ VJHǹ$.5 .@S"$Q$, ' &N>jۺ;8t;n/_۝]\DħO"84 >IaÙ36a)aMƄyY,VJ4;D˯#ѻ0+,v~k, k/K^DcZKdPC/#נ![̇d(MlR|mu!gxqGqs|F" C9'~(?n47`*P  \CBՙ2z@pV`*]},c-p~J)!+X r0pUP t*KuwWUNUȡUV2R \CTW(|/aWYZWYJYw WJ%vts3v*m껥t)S WwVlj}ָq P`mu U,Y\;Lg)ua jK6`2iQpϷ8ގp"ףp5bi, xSGF$}:]&zg'DL}.ыf+5, h< ?p3 h)LQvRN(٤f|[M/M^7{1ZvkI?8KHE'vL0s*ꗍ$e Wr'qN朻ZX#kcCE.gB_֦nƔf9o%G u ЁEDGe *a*pU1q%I/5l(5o;wڀ>K`@ꉓ($Sj QG Dk^vr;Gj<ͦ'}|N4 'O_FU}sڮȮ Ҟ#p. ashťPm|mʡ;<U%Mيd)nJd)nJd 1d)nJd)nJP(Bҝ7Sr7 y7}kƶ }ΫQ[.CtV@phH FԳE7q6y8}x˫oZjJcW?r;'a/[8t_(,F>q*.HLoWQRW/l;_x'^Z8jԒR+m)HRhhٕpo5= WR뉳?٭pW̪L0ǃgT8.DxP ɬ@y:$D̳ͪt~))1v.=Ȣ!w +ݯ7r8B긐m$Lä!xʈNmr1^gաyI|: ڽδ"˜̮ڢ¼ #$f{2n < |.'"$c 6NZhS>c7wIo;5{y {y =٩~̺ݭL("0 P\ 9Bs)VIE,R;R!oO VP*q26H<^' bYVX`c`gyo<;z#\e `ȘN{w%4{w[<`}}zj3C8HZ< U]uUuujoPXAhJɴZF8Ij jX[\ hiptq9y+ qqdF Qɘ$(AZXT>hH((ZC%q,eR 2vD2~Kw6m+w8w -0=UOCdApW8EM!(+ZTJr#wݵM֣wzv>'(!ۢV@D&5FG--D B5(r?8vsܮA6\wW S 'P\G22EҠa*MP30kP)m=hW10)S-n *"N$c l&,Z(փ {<~Gj.?X4Awh$De-{ JbL&G"p1AW2z R;9*BW!0! 
diH\+jƍZw0)7xtN fV} '+VTtƓXZfК#rK,d s, S~CI{J(bqclj4gLfWދj)(999׊N\Y¥ܜKآyn'pe`qC w@PDha5'dΚ {gy?Kt;;qFSRyN?h* if go~a}߯14Iy:.W-W]񨆶_^q[,֪T]XSzT7nFrV?zjy|~S9ŒQu`̥:[ns(¥6jY便pɝGAH[OF$.ۺaݠba:0 g0bgjur4m#{] ׶me7%.fI__i=fu@vh-Td94iO.9KϮ5!;4OF8Ge]`c\BC!V\ibv UJG=~<  ׏gwg?2sޟ}Դ YY 0뿼{׆5ѥk稛 Я}ߟ-_X!?ӸI[#F6m6]mqGA %d)fmps놉î[`nfn w4{޷ιnEoJ0G/$D#Ĝ2V'Ɔr ,%(֚FES^EMhH*":@`1`epd3 ՚1w(T)wBӾ/[c'ޛ\f{]KɄݦ v`YeiePŽo够ƈTE-xWU$ے, wj9r+˝^iTzgot5Gm'Z 3i6~pKs5UӮj66vIh=}sMm_6J)}`dtjbdO2 .JA- &i+'GaGAKHb)@ rM,PViJI>tn06[ ,y!HY\\˯_^4(U9ڹ1zFqi\VpeEڒ0r"* ,1e^ ǟ%?EpG!ьKn#/Zz/p TKaqt =z['OLx܆R)CѸ}Lf_&rpElbRM@-c ZAe.G܆Pky--" dcEF8?g6A DF\ΘoU)@"A7v#Mㆉy,8d@A鄓"4rf ШYI,(ie *Q[L$vIZȨRVtk"O3A9>q>&ю4;^7vaeq ǂ""逈"'6v6% \3n~ƣwuFsq<^Jmc@(ʘզ.hc2-}Ӡ8_4\t?;Z;g s/k7e}U-Zdч␕}L=J)۲ ug'}4zH4Y8G#QyCЎ>40LqU~6{EݲI[?c?8Uȳ.}*y݅*/ׅЅ r[^ j#UIA|,.ܧKSm$x۲ ۊvc6Z=.mk;m[-d%X?%%n7e̴ GM Z[Ʌlq+p/OrgtVQV0OOdtX,ijz\ϖS,>2k7)kg%j /98-Wy8{Z^VC8Pc& 69t&>BW~cOzOL^gˋ: +۽Olu}ehL_>{٭-pzxw\z>sw]wF'|7H"p&-ioB>sc' ɻf1K^^LVr@;sг!4 yBLkӷ.VZ NTN}4:E] Bq՞Y@[V+ǞhXM496:2r 8-HqEMn%fnֹئ$ҸfSYQPo-+Ū%n#>M][e4މ)  |"FFL4x۱]-[p>0}j=q4CmY 0.'T1Ƹ 1T1866 쬯qEWLktbJ; +r&Nb\kjWਫ! ~y7SeVUy \Mve P`RO5^&W/&DI;3n&w]BEW':51\S?9UH@@:t;wzXx |sR=];_''-̘XmQO7mt(]Gn%$P(n%y1sXV/mTӕ]5b\YMgi.]WLi쨫X|MCFWn5CLkS] QW֢"]Y넪gqZU娫 i슀4qAbZS|tŔv QW5ʡϰ*tŸZբ+5?+9lY(_g|RsvOW+{E)J^Ƴc3%*2?$Oin6~:h_Hث Sxbp=F5VoT)LuvCj(YhSZA%+K%G\iv&4n: ?mL㧭4Z6bBz ةz;a\]͍\5`)K[d qǢs^+8㩟;N|gOHۦ~PSCW8jߢ'銀ѨjtŸ(j:!JSӮ芁=T+j+ܨ+pX"`]1.ZtŴҗ+ԣ+ UƊtE=Wc-bSMBGvue/V0 ϻ)K#4gGj~n:߭ޛ|hyl2o䣄}yO 5dGי/Kn]:(MiK]-:yYr}.jɷsn׋LߕWw\Y%Vv9 gt9=8x SpigB#(x5mSvOW Yj,͸XMh(̔0C4o&U5K+{@ .1rc8 z2f,Z6L;ȟ ewOݩ~q{(ea}ЏAEZbJF] PW( "]1Sp]1,>bʍQWѕ1hHW]j:8t]%X;jVX+zƮ+ŧrF]=xo**z+5iOQq?J;Cԕ: 뉮 WTA ѣ,zfY:V9Slq+zcTfw2z|$;z'z5gpuPU^W<_´Җ2rFEǝ 8ӻ}:pq)i)%љjW]=;jԛKQWZtŴZ+4zu%׌>$]1hAuŔF] PW 4Pc5"\EWL+EbJ5jN銀%ԣ+U]1)>"J%F] QW9t5u zƮB-bZWSCԕ 3]KW 2uŔEm>tz5u ؀FW[G]=+(6:%M60S?F虱xpti՟l+^xxf`[QUעiUk)5=@M{Ixmq”qT[w-pa,Zޡ҃BɃ^wF?ۏ֝hB?ʍe GPcPoуHWlFWl-"Z,]WLŨJ|އjtŸjӞ1~E;t *TqEWLk]bJ7j༯iJUZQ֋uERᨫHW+6hjƮ]jK XzƮWI_S9\WLը hm+z+5V]1Aʣ+6X - (^WD tâwvV3goS-c&c^Luqy)&-n%{&7'ߟ߯*67s.U?/(i_+?{ ˋڔζc7 Bs'"ܯ?ߺJ"\No ,*o6.iCBtnB{r3;*V_;[pOGMzj}ܼwܪ^j)kqOTJ5?'OlWѩlnղ<"%j$1^Ȗ-̊CBدțOYۼR~0@(W+J#)zZ?ysJDn4_>SMԮu1(%k3XR0d*dhhMk]!(Mo9|%?[!IzO+fU.[/rz|)% j^\VZήm)h|0J&t60JRhH<]ʍ.5e .+|@Y!*%B i4IƤdElFen[fi@|:U61"Q\z * jok h0:Hi2DnށNcb!gr眨T"3HJF؂j ^o[i)k^9ί)V֘,FI&2P$e2R0A,D=IZ{Hy9d 5f*VJ T)lMF5Qmj(Ӡ";&a1@Lk?ח+-%ɐ*Hi ӢBUL(-&v$4MKEEwIg:LMʨa}A4[.\=[$b<3.H# UXn.C&Y8t!FQJhI־Zސ%};4GNOƐ{s+p"72ryNdJ[;ZKk=4A ujFwa`ZmTwtpٛ\6NuKk)@FzZV&蒂I")_j!>ꆀ]6ei A2&d]g(M DBΦ^RȠU %;.j!ՠPwVrEdܠQ { @rLBB("2m ؍ttg-JC(]eԭ9+ƒ sflq)ت(jS,ԚϽ7hV =liVԀʬ$޲[6RSҭ*x/GT^``7j QH6 W|ZR0UK mry?Xq}9tY7, ۴m9Qe,= j0fU:9 6IҢGJ$mPp$JHu6tk*%8O)'ޮ Ę#@99KѰ-G' <%2`Crh[SP.O(7"fh8(uR"˕,.TP=`ePˌ`*1#K[Ƞ rwU߸YaKl+TO+ IISL1. 
[`G] +U@ &eCE5FHmr3u#&ݟA l?ڋz+f;n-:m)@;\>9}7c~yT<) >51lݐ үKO϶~ho>.ql-)\Cm6itvF;m|hݤ9/f1ů6ze.qr|ĕ^.o'RIh>km+_,_H+-{OcI:h^@b'; N v@b'; N v@b'; N v@b'; N v@b'; N v@b'; t@Hꐍج4zmGW)7RZİ}U[["= {b >_;::/oT q9Vs ⢓9`ĔP"LE fr2(P>Rس :H)v@Pb;(v@Pb;(v@Pb;(v@Pb;(v@Pb;(v@Pb;z+?^|m5V׷ ׿vb/f{@bDm7!RdK7V u%BqáP*V~H翭b͜R //f9:jbV8z:ų+qie|vZǗh_i{qtW 7g}vX˴B{۔h(y6|~C b2AR - 7j.f7!'] ųX!?JCWM F:@#m k}Sfv8EQQ?RL!P;pOdyncxdw'NwCk㈿ ğS,>w%m0+2 I="ZNY_az:tZ'DWj5&CW׫LWONI]pTq2tEpduPz."]rOp+r2}tE(dzte4Mj7"&LNWraLWO2: =+;"PzVWOr ~{c~*tAT UPQ)`#vEp ]3-'IW>D%P';H9'FszdPOow2)b ~:c nSHO3NӄR2M?E&st~V*w]k4vjk:Š΃ItD:۳}0E?/g˗ fcz16W{Cvqۆ3&MLq6ǡ+z2Qٷ(O _\hztQx'^?RGL {}'wCF(, ?wvv03"6L7NW,LWBWJGg "zNڃWWr"`z:tVN詒S+B+BY]=E2ƚIltUY"LWO#&DW8NG]}P ptE(wRӡ+gS*XI;"j2KABi`6h5t:wf?7G_~A`Eӟm$XnQՋv~t<=B8pjQr_xқŋL?rrǩݽ[S 6@lF*-r9b?~믯83~]E%1-cZr3_-\ NwA{~$tsdH҇IWoAnFi9`ZC* N meB.܍f:eJܪ$+r7+JWbUIJƷ.d5AEZYoV&giMELel4 paA7g}|c6j?qev [r^BBݎ%d䕥R%M!Hv'\*95 2{BjLXQen۟d-wӚzaۧU5xoз\ |7cMca[-#mm%dM:s/Z UQV{ gi=z޵6r#E/{=|_!7Y,݁GqYr$9w%Yq0դ"bx}j>~ZNpmO'/jj-O[bwfm_^YRGo)וo;غG11q]s=?r$|||=|՟/gQXdQw՟.7c{_Ho}\Vp_#/iP;z8>O Z\?Oh_p)$Au|`TրԻ:aoJʲxE+B*NzߦI }7/p/B6Dfȳ>z=yI纱yGbu0 oAۂ Z޶;޼muUk=RE_ՠ\Y*e*PUAPePe`0tj?=`Gѳ{Ҍɣ$ZʝaTʐdeNpHِx4Zlye%[6p~"tAH| RjD;%Hh[FGy|L~<4?bsgw7lN4˺@K$w ~i`rs w̭pb:z zQ9 *:GQ(TrsF3]倊{ÿɓVCq6g V6MdD!*Ĥ*͹>ٜ4=Yn_'Pfka(2^ivr] "Ȥ5Ȣ %!2W0g.z+4g 56T?nGYg9Em{S^O.l'l&G& 9Lى T9iS>I:MmjM9h.\-6Z0{gp<.53a_l ffeC|eUTLIΌE;商.C1Ȅ3`rH%,;%1yq p.C ,VYXf12I!8\d3˜8'w*hnyc5rv4:f eC||s1`qدB<53ښwwhp\y3yUMm`:[f4=[wozt1ۤY[;HڤN uՅ/@hsӥvrBd [qjWT]577O9litQg :ͳY7H5+Y͸uf7Mo+oo|߲CK57x4޼tw0og;9k|K382d5k·?mQyQo|I d0 h#,Bl/[nm~ ܂ӶuۼǣAI?kUe쌫YfZV*APr`HjS}Ze,[uDjɲ=;[y1BѲW%-(,cC`A5٠NI\q{r)nH}YdFbqҸYNi:HVyLllFvc;.'%khЏ7K LoJf~ue WwEӻ)zHJ%Q d[bkEDDIB 4k7!JE]="Qg`]୷AJEB9C`0$xNOlYiS!1IzAS9ZBm9%7 (o]j4x*cџTmB`:_PzPU::O73c`1&WK" .>IͿ7].IRMȓM6I{p-9g\2qrN>- ?yܜLOpY =ghAaHr%7Єi:tZB>bv4 SxU{~ƛޅlhfWkM6{TI+!COٷ$TphD2͘X m^ZqFKOoT9`>iVT/nFX~nJ^Nf-X傘sAY3풣Xm>,;7غY=F,ϵ,3ĄQh,|4'zrg?08Nͽuc%E׌s)+b?Ʊ4}9!{#ި |^ݨ]4Z~\hY)?~vOw-W.iRaqaWz4Gy?YkMavP5q_!Mq//HH˟>~ߗ=?\woi-)ƒz =Eyw?޵muMͻvڴ9لЯmyM߯aV!0Ӏ:e7nM*HectM<.ث^{LE/KC:1m-8625j2Rc1:Civj1ؾ@YowO*j4~MeeR_ϓ n S~,Ho6m(ozz@$upKYC_0^%Ux[AvZ_`tdުK}ɼRxLN.X1;F8pAXd*xJ:k)A|LN7,3bmFa׃n8fn^ߘ*9=k,%nu ȘA gW'YxmKudŘH OgNN!4E6xqNDxE'3m2A:D&O4+g΁#1J :RV'cED.Vߊz!!D1Ɩ5r *~,+Ύ!5>yb<2*i#xnkunU+oq˘*?c5Z&?pI2 BnN[IB("`S\hU&eY `\KϹII[#g;7Z*8㉺X˺upš˂߮&)$vMVO~SjПLJ#$,ȝLgƨM;^bڨ@7H+ ܃ +-8IۚLx^;]rAaM .@яJ:nя+Nd6P% ޛ1Ʃ;;D\7o4*߼3=y'.3MEVnyDep9瑫wϣ֪R;?<*R.NI.j4UսFյp9xo]G/|,A!a@_D*[%wmf*fwmmI;;[@0Xd3@^`wA-qL Iq[}x)PՔ(-4^U_թ:8ՓjyÊݭGkA`Փ[F`}P맃%,ng`~,4~rߍXz782m:M> Kz_ր=4nC)Lmc)&\7ACn4Ad>hЯ_$0k+&˛=΢t5Fa\5}.;O? 
iija7soަo9yAAO7Z&Q9ƴG$q R&pe -sprW׸g =cxxJHJ9.5)J-sH 0NX, R˺M2C9pܡ1R ]&pP ٷ*3jy1bzZǟqéwwPj=mVXSzƟ+g<$@"sQ5"ai%ĭ>O3-ng=<_Q,s%]`zR1S2j 7 4'{S{{q\2x)ǍA4hJEI.3ƒC@ ]nI0JIn?Ɂ~!}C<8}եm c-ͩ6]]*i1iwկwvt L>^c8Kn}9f!kMduM;Lek~(;UNjae%nY'PPseQ*XNQruGwy68>zcm0xF_ D"l~XFëKE[ Tb+~wpqs;wuϵzuw3ëe:**ֱ$8ﳆz ˫.k-Oإ*ਸ਼{'dw[㮎@wc3ԥ4g%i w=_A[7i>qAAbI2}XYj\*۽(%,\N`Ôh5/s/1<ϻzw["i{u| ibe-rM,^L?nǃAM׭p%i=IyQ6aZED6[B2W Xv<\<3'}SXl[|+]_|ʄV {-L{ӘdUY=G/m.䨒)ykfݔ\Q:e`!]Zqi< y/.smtWik_ah'83>o}iE^1+jD?a K!5W҈ B}B`z2NLѾЩn*@N4UtFym^,(Cs/Ͻ{>bX \poW 0c)towTA[JZ9=s y.|iꣽY3{--/{JJG:-?ЉXɠq5R@gB)# nՁ2ғk]HoRg𮿮sKk9LĸYdEsEQl5:`> ĞZ`74R`l̚7iUD[`4)2oxvEL~&͎n06mr-]G<_ƓteLvOK8-Ot;kmӃu'#)>*ث6k:?-dCx&^UGU3AgGDL՝R?۞f]R\u^0[ߗ3-e[iLL~ m |jƍָH iJ{nr Nh-1;D.V:4$%ࢷX$o:_ :T!S^9¨0¬*,QA {*#lr՘ |MQ~Ra0dr>'pi~a6~!ҭu:3uڝPrO2[7>e$|J,G %K#SAHDb CTB k阘C69"XaJjqK".YAbDBO3Ƈ=tYTvv6ǿ/E&WDz[WHVrSIAII8@!'L>(R$ $>[}cA(N9*C&"^eDgMQO޵ٿBKEFÀx}]{a\'ȗB?%PzpՔ(iX9ꞮuQeQ*N=-2ֿʔad -N!Mѩ6wd2Q9V_Ϛ>\w(Ke͏8guRk E0ƿ^̫xٹ0~jI֚__y[5`\,_}O*LfxtrU|88GZl@%^Z@aa#b%%Wc|:Rzj)rn8XJ~~t&WXR" i܌/2>뤘rwѵJ"F&NH~9ON?ǧ}0=A>kګȚUW&$虥IkJ{\*E4_T+`'3OjN~$ ߃|z2) M5T߽O}"|4嗋]1~ Ճd 6PK#O?=L[xBh@]T (>j)tVvVM7Yzr6޼7nrI4G 0XTu逵xu'/ϋf^8*lhnծꏪoA-mt8^TxQi3|B}J=A@+nΆw]5b 롛Y/.y QlNc )**Q:HŔ^^h^M)Uh4Ơ&3hQ9b0XJBԀQELnkCM&yl;|lU¢|K"xE,Yjm!fcUB]|gwEl )pqmI{J3E0F aAqDQKL_ɞk1!ѐ7 *RVa S *Y1X”|3Yij4BZ;7Y6u:2&yjG8ϮSq-Ǧ ;z&⎓rK幀ʫr΋ֻp 7,]d:dkHyYɍ-Tw'IfI7U_ڟJu;2[ƒ ެBJh.vSޑ*]xs⩃k%7D-n^Y˘nN-Z-Vo/vW^,)nyL"9kt5s~X+^5k>VW)p/30u=g), mn)3W1u3 YzU_@pNzEQI0aZ[HtHq6pa2xd!9$,`Oά>I,Q,RjBS8/mڧ߆n9ij+iƯECZpT-UJ)D= vZc2P@!mh+#^!fV0BgLD:* VZI`"ߌI!n']3xEϧ).ߧϷP`Ϋ]Am"~;e\Em*wU[dAj.j M:J U$mc\2U¬uƴ[lFPqikDpOfWUJŌg7QAM.>d, *wGN-.[ V=˜UOgMW.񭣭kc?T 4oCCFdUO0GB?J|+}N 7qmvrg@$ aɢ7RW~0,x8_eb>3/g7t]= U=5,og;Mhy bza7kjtxC4y[be W9,n:??oV]Ԝ"$pz5uژi.BF_TY6k|3Aqwu'}+uJ1%=Ь!^-n|Iq-] BM=\\kN-;\] 2z4WT\uйqﱓwڊmR"u|s Ǜ'.1s@;֌!GH=S4f dm) ))l@ZʙB 숖uw3 g^v~`.2eJM&p !cl~FQEEK*{zixZTlcc3_-Jz+M!f';%_yVuYQyrVi0a2F"TPsLSR&tZD]JF8kS}0 ()&=)Q۴U`̙R1:ۑ1 X{,-E[VfWkP@aFѰpV:yczg 6n T;kX&($gw,9Y˸\pq+H@z0TDr)+l%(7i`Vm͎C񐷌0<<=УNRROz{ J-n~O\_wϓJxVI)rqE%ۋ)ҳj[g"Cg5:{Ӑ1߳? #.i.`k4umv[_⵼mk SWР}.&%{'k5ZV*~gv7Mk~!hcW{y:[$z.n''EC C#JzW(.%/pepjWW\ -H* lTo EK/pKup J( \to*+U_*KL*KaWWR!GpK*zWY\c0K+T*K)zpp#B \Uk+Gp~-Z"WYZFWYJzpYU \BWYJ52J–|WKbˡ<_krل^̪WE_DxwoxVm^4ߑ%A5\2}(oo9ы~ͮjS; neXs u] |:C<:hF >gVq;x:Ź1O%6gǗ`ߣ3?Q( 8QN(S^؉)Q4񎍚NIzwod"S+#Igd>HȜQ6R (﨑.GZ$Rr])Q!ʅM3!3`} Y\W#J+Iz dwCC/ 0%O&'K\MNwD~Rm=JpuSUWY\W(-j#wO)WGp67p WYZyRv-d+T}d* |򸓽e݇,WJ{p3izWY\CW(-pdWWP`c fq7p%!vzp 1,1UR rguU?*K{χ+R.C\9098(g?$`py{QD7OΫt27E&k̹t6LկwGGEx9qr|A<U KEU6@ cxu;,b_]~.ȍ>K<\l{JKQEaw0Y!Κk#TvY/ϊG98[F>\MbxV8F (Dtp ?f?yT͖JЪ]Z[u&y8*b[b,O %K97h+%7oTRB/ׂϋkXRy%m[U֖5Uza/9?.|Ftc%ZjR;6ڽ߆h)U8jgUNuf)2gpT9_:tsr5=0?:v‹r6NsVge- ĩ1b9w0G9 I l;W&q9 RY V>Z z^f[\;+|= gF"%i̱,rB,4C''rB;R ×n߮gw_oCCU~ ׺[T|},F5i\vP p-u_a ;]EpS=MUW &SוL]_ܿ}K+^jEy=P|G T3!VՔϋb+>,ڻiy?4m(Ϝei}G ?[ԧ↛/ܻaד 5/J1/Rzܞ( ЮT|{܃i:ӪPx[-*b̤2$J!9)3H%2 CtZë~]\^C&9Ք[Bol;,W&MjodH"_L1e"*s+U>V . mE=V6 $P҂x ѨD2X>^ ($J@Q<\15qv]r E Enni?+]svׅ z6~7܍aT9eţV-ߙ3H=Kj[8wn5vZ%62USe3U[;M[w Ehqh9܆Tx9q֗uCέs<(%x@-XSp!ă6\0(fB Iݪ6W+t͕j3sV 2_Lo-y u\(Ku6&aJ sLzʈNmr1H x yIz>;>o3`v}}`œĄƗfXկ^:~4kK,招vmmDI(f2ybV2ƀ.qՆZkMV9jU="ErQPBXj# D#M!xh9QK[TmY[i ^˳0=dzpwz2-ey"ܣͱ%M+1\)2FdWl ZK"9%"KJ-G5Xc{€QJ@y@Jŭ4d4Wgg 5I)cZ:F^(18AC3X D9arkS12lM-! 
Zj]$w_S*q iq2D/= ]һRr"\V fYuN99Ҙ4)MA_A9M u~Xi>cA#Ryb p6E FꖔհwQ5H u* :&KXg$.,$<j{=A:cÖz˯kV޾ v/N{8@{{C^ş-9YܖVQ(eD QI7!xյwJԇ*G:8e޾QmQTQ&5砃FDR4B>+ADiϱ}ņaYi⻗*#pS{uAxdF"E hi-JENG=>M*xUdZKh F"bE#7iQq5@ ZZ_@}M8wGU}jee{48HB$nmfQ Z#CQD[?AArT WB<A#I+aGÀq:9$Y,pXCG3Ej9lpU )QA5-ڡSLjSH @L`*RDlj{) BiYS@j UY~PDLH*aHL: kEm>x-׊2nweDC5K?8\1z?Z8v{rl9y5غA Tߤ] М߾6gmNk{Jb+ޏƮ񾺡LfW5 daPN98֊#m iԏ/9m[48RB; 9ۀ^/q@F -19>ʳ&.cz.B\5\Vg~pd_-k˫lq4*z䘵AE&8Qefs+Np jg𥉉>x63⟭IurF_ s>&i<:>Ɩ۫f(©6|ަsxs& d%[r}KCͰfPldy 1db`żO'@7WmNG;:pE'ZkyF69> ,HnXa: +U"~Hh5,+mi(,^Ow{c8ij) KlynJc4v }ѵJsGWԿw]MU"?;woϟ^e{¿sCI·D"$SM^ilo47!M뒽ZZp]nBA = #u;uU6Cld)fmpLDZtx?VO`Rͱ\ ?"GhAU~9]}w}ɲ3">saCN<>6wv{_[K#4*5ώ=781GaQy`-U:#RdkT ,="l\ `WqL26j4n!X e $HbbVh=W,0P-W!F&Di>6j͙əBʂSgɵ C1q~3wsTEL><,kV7p7cڇ%»u\ grQ/xHJ%n< kd !P>Di%3MVypO'q=гGyITp 8(Ky=Zk%STgdJԜ$%G"ȓ v9-By}8> L]55sGS<6&/4|VN8asI\d($}":,/ĕ\dϟl5\y|߆k#q<0=Ƈ7u@78x^ÜCE$7x}~cv*ױWY{hn&PKs0oLetc>"N7޲ɣ6v[PE=lƮsmuuQph1y߸pM76Ktln=sH׵cj%Asw0oFvs`[|CW;χVWgmGkώ~o$UcrŘ'2ݕ"'SVVub#ӈLK'`:>= JZ3 Ӕ*Jr*ζ Χ]m[>GC͛-˔4HN,Bt^WBv,6iszgE5E-& P_}aeeX&>l5 馉Nfz.hh U4}۞?#.$}2&IܟƩ%ٜ0>`fE"vozk7LRTP, 9]sd1T*@~r|XɩdzV,?ݾkTa7S[sLҫDQ̣fV3 o-s=d~Y/W]W/S[׼;O8فg8.&0g[).f_wkeq=2'$vht6ӧfOIJt"XUfkሯ8x ? DODsQ")lkKwE@f "[\ "qn'yS6/q.]A$IJ&Aʥ@=&ckϴBHMĹD *`xjg? Z 3QZYy(k? Y!/6/E 3f hD|,{ؒlҒbc9`&f*~_XU h PM*4V y4/u`Mb, <(E#e10ڑlDIvO%>mYW'spcVΗ*x;擯>a^pL S@!6bրgD4=FJN3Ay{Q%zF|޴ҎP[^ wm k(|;'7M{7y:?l_ۙY_(ӴcmflDйRxIMP) LƁ|U9Er/Y'x9:B%%H>B lcH94&cI@:D fܽxsnhz7lϛk{t@(z5ob> TxBB z.UZ  ^\BPp-^ v̎t- ^&aa+5OK WWiت/#e.Yog=܋tgdڈgcY\#sUZ6W)q8}fϖm3/~z姿r990Ε]^M{z.Vo9c_1,dHR7 E -MAB Jņ֯i 2J8wumOW-m/W/1\ѶwOIHܫU,skڨߧ{b'_S6PhV/6gc> ^=xԋ]jwz !U]R DNyiQ! 4zZr!}㹐îy6h;x 5B܄=;edI?ųb"*`w!"3/=zE ! ΋\f+KfoabKQ"-HWM |$cP)eՁ$zٮwo rl6;/QWמ~ P6:8E1W TOA ?ń/.i9M!5Glbh9$#!209ځ9=s$P $JR`,x'5`U9z/}HMM^(',R^2+R3AD뢲=ّ9.l7pٸ .kJN?o?һ(0_kwxqjxm)e-Ϭynyrѳˊxwjy7(ڢ^{9 mIT K*m.~WnwXQq"yֱ]䫛*zڝC%[]^q9w4Ok|/ˍS/WVuוXln'۔1j,jsw(L_":T} 9Jwn$g]EC9¤B54c@5)deW6d|LF+~3Zg{<'0{^T7^$y .SD9( ˊvZpP*NH&X]LlacqN(LEx͂Ol fR5Z^٘%{qӃIvoH/lKn*85gM.R|\] [/]>zOT1hh$Y#u9I顳 g,1#eH"Y^r}6bAڌLr9 ՛8_ n柧iP}Rdq*(TRIkP:%  |p!IIzJ1nMy8Ġ4_KiJwk1kQha"oBh=$VTG10 P6RV1M;ݨ{ ,սk(-W)[N ЖsOQV㘻ZWu;A.nL1hB㣣]lJ::ػG!'fpu,ѹUBaPu6'I#Um”r\IHӮ!v>)KQ2VYg֨&PVَVgOgq~YK 賖RR3<7ZDX"WI2Ϛ'y+``39oC};@7"!uZ@,)8PAշ-RCIRaNoi Mn|m*5cHXH%$)kՄK\@`+ʁ%.ě;h^ =1#Xxr2DG%AB#z(HePM `#|ߴ@[4U{6Ī%X}"*!wH_VN5iqcO{ ]`%6 UnD(3Y`L|T+x9Ŵ\y9uLCC_^=w]Z_lس!2>Uי]vKRYE(BˬVb=kw8C_\xy P-b:[&MV$ I ٢l{ۮ M/検%%C"($zHR`E MVd }>.gtDSZ=V]69~וh(EPQp2 ksl|=bH9(cj\J?ۮMFj y GdaQ(X"@O 49F"H Zv,nl괥LIB1Hm vV"c91;-dzg%_=8*hw9I^ZS #rDWEΈ-e鲏!}7UeD?>??/n\VWZWWa˭x%^)ntLVw:@"KJ :8Gjm t'lOhտhU- A1`4.|nz%n2NӣuѴ.}w9:B< )}l7?ih rxd_~Z>D/_hT13Xs&WϿ5skgVBgﯠ"{7֧ݣuWluX\xśupv0hVi<-bl5GOՐONh j ]gK`n餫 3f'[VLYkx(֕|6|^ |C[]uծ2^k[(ґڰY*w(hHrC-K>Vw_/>TGWwl sq]$Q^ʈhvv磴|mJnEQ_޾͏?T~_T_޼k^ /W]"` C{Өj[5viЮ}vݿlW{}eInM2 T c#G&IWn%D|!VyXQB^XD0 ,&kVQ㰲.M>죏mϏKDVשYp9 HZ~e@J5l3}9!ۜط_=d{¼eu\5\>MȥF+aZ<GSqj̶-c@tRzmbl"հ-ɍw! 
[=Hݲ(}qN9[C3,Oёm!hD) OBYt$?} RA1* c'.-kBjw6;U4l]Lrk#&j1MG|_] ?Ԣo vi|Dx@k֞kL3o{~sdU87G8PkPl:hvZJHDxIm.9/ܳ TAY7h"@I,Gs?أ~ F PH7D˨@EG8`oSuN狮zsY,8㑒6)EaS/IG!t)@ѹh@9UTtw˔^o]u ?@'zw\3b ̛OǦhWLdk hӀ5 MSBh}K{^ͨL򢨒R6=@",FĤ ։(l)ev Z}0{T{p;||=(UhjȹV)hkBHރw^$kAyU#ĥUdp"SPg#LF"$NiAȲ8'0]Q&Ri͂{WyrϨuTG#W6~DVy{D!|0`I":K%@NձU2 Vߤ]aNj梥bC *3GRQ PHm{gDνalao3㞶D϶Qm2-⎕|2 MLf'7nQ<&|!u揅!%2251i&jrbM'QWxD셬fתMm]-d|v%Ⱦ(ޖDE$=[nl(XfǞXmF'uJ*Y)IeTA$/J&v{ؒzl}͏ZDٳEE,Ƥc-gj9_}7 Fػ=rz!aI03@!@vԥʌ6&%.pŸƃQi5rFsޱI"єm㈂Q\cUaj`) k2Jxޣ%@q(2"&3#NC\'_gkZ).quL 6W4h* NWQQ(ZI9cTw \ؖvle< l~_ǓxYkk{|dˇ aUAp] E?98s)ZjTsFѕɸPtRw!M H1Ƿ|F-82]?YAAA[!F2HҊz7y.DnA@,OCl UC\=0"ALN6!]7L,h5T"QRk1-[ fY_^_Vuni5][a7 ƨWi\]r܉G=?.W;~/mz(~L#8u>UZk4fC%5֩)2@ĄkxA#,vY5CY0 {U\'M6"(NNɌ8.y@$.}HM 403CMP33םQ1xΒµ 80 \p,GΊ<nzUv+f&ղyܬজ#_9𐜇 r 29SV hMut>#ƞ("Gلt<p49y^I[k)Ky=J)")RV$%!O+ȓgwtt8N^9"%# T*HR$BHIX # ќ#&!Uڪ{źH-VuG!DŽz)j۲Ćy dK|  itxEr\[| -O):Z&#DPx|YD9{ `|4-dx@@{cG)"'Ģ{,nIFa+*GҵCYX0ÐcQ$> ⋔Z4^]ߚKſPۿ:?ďu0aߐE /ql||4 Esruqv8Եh|HMI_2|^M80\Y L+kx?N W&G4mO4?| 9ΑzpFιѻ:'CEVtIl0M9mui%i5;!\Fw55Q]l  ~?_u};D͛M_skvRΪ,~BB=տ{l.4")W]tR?`9FbwYZPpP^ze``Hw,Bco8Hxs;6btWMԖ[MAͰMXo6w̛˸V]k~}ASy`u'N΋99 цliFnުt9RCK#1NBCzgXMd.^!S$qqHӽj==ޯ8M'sNK/Zxgs| P6& ~Ftވh+9Q5YV@/kOg^UnM76kZ6/s"/{:w | y˜?O?cyշX*@̖7Zd gIXd/ThkplD$L{^ۙFlGg73^D28jNeRN3퍱\\>#\ڧHNDlNvhnq:sWF;M樫"YC]@5ģMfe̫" ))!$:ҚuǹLK^b~CV^{lN}nl]"m&SO5b?: ~<U?&rC'^G_X t=ۉ+3NZev+4vRj}o\dS =n :qf&Uimk6Jd袭-[ME[quٰ۲!VРmN&^I^7?FWI NjQ0LGq Ï?=GEB+me f&'jcux=B,A\ ?z-h6CptrRhsRƗBMR ~64VEmK'AQqٖ6IlYΤ$¢SL Ac\9 {i-"BHZbYO'6DB-ME Θ"_Lڕ=ݣcg̽1Q{vԻdG%J,q3%XJg^ŸNzthJdD( n $$@f$#!ѧPoڤ 8 87Z@ED[sR8scc I$3⩣F7|91%#RۮyEWaexjgvɽ Z v煨10)'K?-oFZ A1 hZH]BtRR B/2xV:RbZY(I?^v ' &{}"?;0}</=ڝ]!/z *|H%') 2r5qܝ#,h2&=-Vo^/M/]J44mo=W^|2l.Q9jfm_sGys7f1bZp[gZ!ОƓگm-*кg&!Pt̸2 ECtڍGȞ{y6y6eq(IId .enQV.`i4[_Kkax:E9hւ-QiC>qy9R0 W"7Wb.=|2"w/+n4rz u*E `<$2bFXH[b xCzI\\vo3& e3a^Ȅ9=~7W* D-UWC+RpdWsK-.Ţ+QI yZ|3~+÷! cPq*)> Swh8McQRVR^Pq XA}{~δUI_uT^ ᗙ{AI{}'lWE<S:-)dC~4t}zo[-%jA}zG雠JwY4]ZqWVU+>#yG358F,k-FI*v\guF ]ѮhWa0U*vF_;F!hص0U*vF ]ѮhG1ۣ8c]M2コ8or%,x#K5/W:pt8s9s;=_'GP@e$$?{F!_vmr%؝`0~hk#K:Iv9~VKݶĉE,YU.Vϭp.OAі9RP)1jWݔB`Uv.ɜq-*;5DUpϟ]"i='{ڮ;DJ-hW\C@]qzW\+WwH}%4Dtջ]qzW\+WwջGWQ/hW`kW\+Wwջ]qzW\+ :Hp@O#lg4[m=3q3`enDN;Hw$uD5'Dh+!$cD">QM'FC.s|05ER(c$rse4 63ELZ#)Zr2ҽ]歑sG9gJhcy r-8wob(>7->$aqļ22 GĤT鈈<&LoB_6%j0R"X*Ho V;g@jGG6֡u@R$JhD 2Q-3Hk|+OI;f9&fq_14U0@`d -M~c-Kc`Cs.<n o (d#l7kr ^Lʠ]aC͐s3csh@B/ 䠋'"/d=^z E]YKAA'.Qq;,c=wq{QYlVDM)O+#ȁY, i0Z8#"J޴m*0%OAh˂DH L PV~@S8T;XOͬ`U-6Iĝ({ r(|1.Ęv5,tI=67h#k|JH ")bSH @ `*RDlj{):FjP C!pVY~+T$.t׊D}q-uF 8]f%K ~OXXu3О cja}Bk._3_rې` (IzN̻9w0y}0޿?/n\-)!Œn#wØY-NAc ;rN&L+- .d6G!xqC[m",*@xYP@r]qCᴼC=:;ͧFL>rw>vbF?uoJARox$zr14pvS.ƿ4y‰"-6D]q*+(U?6TUݳ-uU-ʆח7l 1F)$}ppv^-]sQr9/oʿ9waJ4э3 }iX4hy6N~X>"#FK;w1khxj`cq6/ݳ.4kZt9,'%ӐJWj0O_.ş"LgWJT ;(+=ܣ!^a< U&vޕ=EՋ*D)wy~>^&܊~ \"XQW3D#G_t?j{Xj2R9EIrRR n `{eˋU0ҋq1AdP zp$ĶmZw݌i[#l[m)mZKPKպ^>T~U^5˺``Iٌߙ_NGJ zo>;gK^.xF$Ozx{h-ILWH$cm_fPB t2L-bڨ% +j)` s,Nv9߿-G'<E:X2|fUTig>hU x 8Uϯ>**YHAH[ꬍ @E*D [X `:L]վ}}Evp%!:2D!Y (" ODH<aQjIn<gSLWVTNÔ.y=- dbX^m( A'z3~- ΑSW|߿y]Z]~l*dbɒ( Zyl0\ҾS ,}tcEdgY<$,8!4Tf=3@wTRURv1|d^^#Һ`"w6Pޝ(&.CQ? Coe)/]`rwQ=-X=A_$c! UGh[ o"a:KY1 Y%zɷ Iɳ?“d4 k,b/p"1 (Mbg>C.wXK ބ dO>ʷ娉0+%/֮c<3fs#CpNca4XKk()aMFhP,N"Ɲ`K(eTFC&bPB$&!Cmds TUI"FFr56fZs&99SY<՞J1p\e9GIovGz5C$+n5\pȲ{7Sݕ;5jXRG<$ˣU.!A%) HfH4ԀQZ DJ+ǼK;ӎO=<ߝ$Q(]bL ZKD(F&D)MRiEd0K;tKpXr<RaZǨ4D>Rps!s *ri׷GYDhdMh6CC&PԶe7 \NdKx !t{lξs1.q4Jh%E)EQʨP;,"TwwɹD5ę}*I9,XTIt܀^_ے4>6XEŹFzɤS7㊺ѓwbt<ޟ/4_fIħ2'a380kTWS^WՒ=p͗+8w($Ȩop)fO>,r뾽q7۳k;q6_0Xp6gP6h?>xHÒvS[Xlkyh$PKpq0*#'!g$<. 
l5=JRFUMIv؉B!9z9$J)n7z9z+yerb>u3t<<ݬ}yYr*m͊J:p4Zqy$˺\֛ϛF0-ߌ#H^ =c>"DE'o֤VmdpӈH[[Z$Lcբ1=TZu͝4%{IvJ?{Ƒ@\H~0Np1n b{5E$e0ݯg̗EYLY g~]U]I5AOW)71I O7}N jz&^lb)؋_O җ[h?WFUϾi3! 3צ.|lsO}'ɋ6J0-#N)q:7)zĹ6oK~۩^GN *)0o޼̛qຓh6:{hWuwF''i mWsҍi]_t6b9f| ݣ:x%&4?mJYMZy23'TMʆ-WMt]!DPl {U-C)$¡JhJ`^g 0[\A(Pq᲎ƸQkX\z+e1vS.jvTb+Zʗp<8A#*c8b*ֆ "1AJ=ADx훛գN,:vN+#[UxG am4r`8q2E^r}$c`kh&ؤn@ý>⌭n9خdrFqkoӷqT?~< =s˚k& l`1FSz0XfG`e| ֭h)oz$gCD5 AS#r B"DBa/fM:.m.?OT !#– ֔y]d*=|̝1L1*4ZJ)SJS@` IY\QQlpfAH$*Ű@H‚ZCPK$6dNXɶ' t})c(*wa61 15" @mc7::hkb,Q]' q֦&|GmAW12#XP1g&HFbGrJ1,,[b!CPpY UҌxf}!onM7uGh&b1F~ҡ pK6_+Yي#TѤdO$& fh/#vHHLBDNˆ]m}?gb j=Am}`W:8s I0Iy`:hL yt!!xC@B%,@(F.qVqB3 ƃ$H Pc}H7#g=f~rRq_l0""qeF!FFǨj?`QoMJ::Aq,A("jm0 2&1pz " K0I̥z910pq4}'_g1.Ia\$\(fpwl2%Gmc 2yZljq@%*=>='PC)s;)g#ZVnTN|.'g4#쎦 aN;w\ VكikkJu(>!Lb_ٷ1nW0oeeQ}ۀ-T|cf( OoY'ٚz0bx;:ܿzFA_Wo߸D"ނ,uzSueo2[gv'ϝUҷ= ]9"5*Cz Y@SJx->c]z, ~Y:LqL\i}ۋA\ynK2l9]|U$!K!5״2Սa43ɕyo ѱqjK#GGW+|}슳UמvA{EWHazG/"A\hI0δI S<8Zن%T(A; ZDl h[JA)tR/ٶr7͍f +<ݩwi_-|"QJyuf%j5n*0:W WH盛dm&͜ ~'P[#A:$ԟfںc#W tP[w]y wOC魺(U}+ùU #z!M(!~>HԔPq0P Xy$@5&"}gV++1-/-!'GWѥ*[{0H-e` 0GMR @LjnDh:vF|I9;9f*Ш~>*`< CKw{9Hjd>NW֙~:s˅4NB3 M*Su5臭 @Ef:ƥgqb@Л6V SaMS%{ѻ%՚8ڨ44k+KO62ﵜJrg9"o"=1FqP"FbxB1DVaVKKLxJakC2厡]M0Ӿۡxiߖ,Z3O-qnuXjlB8N (:'PIbRࢤpa[}^rEH Oc ڥZqFY | \K0G/9bhGX7Qu5l.0NM3Wtި0&Z"Bqc!G!s\ <GS-3U ;-Q1!|RiBH"Ƒr@jR;mT#WNAXIX6y"eL!Ҝ1>\ pPZh² aIU>ܓ?z =~w|Kh?eE\xfUkԠ,XcV |PLN 6Kb eyDy_hVk.=޸e3cU1spAŎs nxB:iDLК;#f.krs@::oaMO8V< R\?p3u>;FM&݁=U:FMnV9*`ac|ܶyO$:OPtl52]~O^r2$Y1(+BKςuh4XhܷtexvTs[hflAQLc`:A^hi {B 2 9) [Dmm#) 9%lEwZd B' L@]YTeH-IkX$g-6 $@JPZ)OjH;ĵ(ߺ<  `\[ȃ3 a18D -lE{k9kYAOL. (9*O҃DPuTa3F9FH:b_Iv-psۦ 9}i߱Ùt1IN OTq8b$_c}N0LFƅuaYۮRv.{<:(b'Yy2 h>tuc 9eSD:#9ls3 ۿL/:( ! DK 5j$_iU4qQ-8Y/Cワw'r9\e#S(j`e_?UE-t~W9UH0:'E`y; T/6^q[HQWz a$6uV6O5X~++.Mϫ/WbtA̅~\[k V:6`0xIڑ_?E0v N> =YٻM7LNy$F+IbIJ'2>?W̒'wp6,T-:-`~-4N-'ٯoޤ|R3.|%O &v-HmNTN7(/g7:!_>=?;})&ǧ޽/p_e:~0 =n _wZѦCSvZ69jwW&\37ǥ.(Kh+uE jcl;IdvHh M!F:8&cq౰DƠPiC fQb}o=`I;%/翇b;cwRMVH*NS "$A"T``$ů Ȅ(0i4\gѡk'=2YNzsV;* QOo"ʧR7QRQ@K&*%jKZ1xlm&G B-m߅q y+\@-]W]GӢOVv&r#i$ g5ńv_T9.Zer&[:]ْg7f!̡e^qnZ[=/o "ߡ畖!ͯnv, σe$t-W[+puϛzv}&G,2)[۽ ]lKP+[k?mY47H|,$GV/ЎQ[3Entӓ"B-uupx@\~/SZ=iGXeD.5L,7$ S/eL{ϴM(R7j0~M6^vqMpɴڦ1ZpdL|Fn.`TJ!FHPE H "D tT\xjAa/!rƭbh#(`Ϙ&,* VZI`fHN(B(Ica"pCU` US s'N0,CETtX$0FeRKls;o<yy*Cn6Wye:oRZY|f;nxd:g0P.;/ͮso@&.QRL. 'יT]L{Mݯ#XY/ETUu#Nv5t풂.]:_j\ƝI_$Χc{:"]7Bx9NKNnpK>iQk6>2m˒R;!YZ7XJy$\ZWzP%=pr Y ?3xuQh ת|kP-:A4-bh혞"NNIhCDZI9Y zW_1I%O0}JvW?T db-؋_/ Cq Ow4E+ToO0~\VYRop[iuRJ70?NlAܝi)UTƹygUTC/ţ}Q 2Yϙ‚9p<~~&2* Au@U^".èCpfMNjC_ͨ{>]%弘a;7wOnvZ4Mv%$&8'de ʒ-et?gQ,9ܕ??+Ng^ξ90E $.gR\)rb> ~7i6.|HË[s0j.}nY =mz W KݷW^r.% 5[&/!Ruw% d Uy$3.7-8|C_A:PVnNEÊjTP(ӐfEXM^Z; TRDo:޹Ӂ-w^<諿<|<:3MifI|6ϖS\[7|hq33\NIU,N/w8̻ ݏrVnRIud5Lj.;W<&\֭K.j .9ڸ0D֌!GH=S(fB"9A#ł6QZK9Z6.' e6@L P<]/إCse'\wHy4Gf8"wTiV5xTxu̳ ~H&q'↡_NЦx$Q)0*/c>A%-c.Tb,HӉpGc\Ψ ,wEA]z@Wi1NŸ׊)=Žhi>MY8PT̜V6T tۨ#  {ƻ۾Y=ܜE'ieu`h43Fl2#'NK$rPܶlSJU{NXةv}$UDK+_L_<-6Nׯ5r2g ns[`.XL䔶ءz/Z7X=XY3:0z {CTXaZHdQ(%ج)XژBjT!=f޵u$ٿBR?_EƓYacH,;[}DIe QHKvuӧ˓己6 <위r&*zAvlT)Z'U yYu"0ӁkQ DTJ]l*'܈&RIk@lHFj܍J5,[b!X:,)E0 {YAzCet<J#lseN&z1*deS8{)kfVzI0I_R!hHFd2v̺,!8#fĹpVL}Ajڱ' CS}FpQQIp\xEʩud.kqLۂLUì1XŁ,dbȊ aM" 5G(U";Z4W=j܍Q?)-/XM?ED^y"ޚHvxCE4d<k% 8Z0"1mlc$81e2 rB 1%-Ho8Ēϣ2"VnDl}@8}'_g5-Ee\.vx+YTG88 SQIт{[+kNl%ěL>9o]Zڱ-x(@ =IJ5NoTE?>Qūg`OJ$Q_0GmiYNEуKx0Axڏ\\%;x6}~Jʝ !,Q#|1?aҚä+8yJ^JRg-XgȾƒfAA[ט>(45-$ҵfNf_A^ w\tm2J"zћ%n%1鏿nvAͯ8Lߙ\)zcozb zto],G=hp4H<ͨ{Goz| 'F=AlhqQ$Wږ+ȍ6L5oV;e|S6[E_4~dr֌lȢɘ}L H9Y M2"$04D Zm9 x{i-AH1{e@20U %9!fSj< >۟]O}!qb2K^,%O΂J˳\h]Sbݹ8vsIP|ghiqLJPaV _4# RFs& BRG:rQvo JĜMImE 5#K*uJQ 'RmF_9GOx'BŌ]2$VJ1kmV MH|9x3q,e7yL{J YoZaC-K 5fo;!# G! 
SvP:m\T"!T>J1T(^7L(X;[RY '&̵9b5q戩"`[,[6z'λ\q5B!RSy|t(&ޡ }`pz>)d> =uIʙnB|7r>qqqq0pq"D: cDz gS2x¶.$*qf;^UW]E}U[&إ[;>յIwH n^;DhB}?=P"%H쾇) BPpa \)jx.pUlHDW \IiYU(˚DBGjM_xL3Yb;7`nV)?Y W?z.zI]M;ΆqThz)xy<ǜ b r7$XN%9xtoQDJ{&Kif(? 'E|?=?w׼w{fѵ jDFO_}|9l9ffxi|n m7&}/ܷT|&zy-y˃f N&[dh"M)@ klNu Rر]PʓJ4[HLb.?7Oޕo7glvUT乏v.Yl2UC\ɚ[|ӣ|Ewng0ӥO/?|ۺA589S5=2);Q?p) +Qࢷߌ$\\vRFJjm41RKbPҩ7!ǽgkI''X#E`\f24nO)gI|NQΚyH(ڕɲ5k]SŷaZT6^_ρ,&)ZD.M9&Շ .cꢟZz'8M農D^ҊMxEAM~l~ /-} uYɯrEBV„Xvk]_l;DLBLE=wlY͇' J%D/\nR%Y MV U Cǫv%f{~Qgrg&:2$z'0:q\i'@oWl`<0V<2aI"6p~"tAH| RjD;%h̵Ӈ).S'gs޴ҟثoI90Giߗu7eڞB?79}yLq0 \'03js'⫖l`bI`<qW"_^ø뮆œiV'k'MBprK@!3Lwm" xgp?zltrK 9nouMn+m9RV>sc¯+WGӝɛ.~q8JŧoXunʎKxX +=l)>'k+ajߦ]<L/ M?ӛ^㯯1__y=2Xb/7܃EAp|V޼ii>M =uf7J'R !8vD`SpFK jcj#/N$q;I 碗M:1m-8625jRbNuvc;Ҵ}t=B(oWH frІʬ]t2f.Q MgBhm ~dש3w㹑/5TӉ5_au)N_8-}lSycN hUi4)ggBHKFB$5(h+t|Ǒ@qtw8im0tf'7,zJkwF:>T?6ƌ7VA]z8A\TpDD0R\WgݥN(g1JWFk4q*8$93We׊AkdNϪ}k^ zݬ}g79'΀4{XIm\o֮ ӛ *˻ Q'zٕsՒxB3[,8c\En=QYcե9.v+b1"j)w&j'G(bv1p\EႰT\qtRO,Nϱ9Gӏ3!wlC/ZOWS/&'{fkcq¨I;n(@ Z80i6.sVROzG-6Ekhn+z,K)joqof\L.4*hAʹ%HrF-G['o9D&)T* F̲\3R<Fiz͹e[Ŧ)b.8SAq<'3b6sIp%x T;5m9!mh5NmɆɨ ̊qw~YvWOm71e TMSݙ\Yn1u ?|FݹJڼTms(ˬwDx64ri9'^Qngf;!lhdqn[Of{3_yN{އ/Y몈;&l+oё_8^- d}OKccvWYY5lww[oߓmJe;wɦ YՐ !@4QJzf>} O >}F ,$2 dS.(H+S @ *dNF$FfOWMrS59E0N#I*]#r!9G's'Ӛd׍_9EKJ14;c<c̘JHFP!^j|OSXy(q* `j%$T1WzġmYJ#%D"^X!,dUNZ-4](1rcJЊ9eNC׎En mU+|:aoIAȒ,HR 8%OsI깖,psFaR GI̐af9?UANIRZKs ɑ֖D. *LS'T'T$R뢓m`"#"3CJa@eJY>vj>!SL8{|pr<(>#Kc."k@2*{JI:@VԤF \L:;I5/b L*X!tNqkʂZ݂^ H-%V~Ii4k%54N'^z' 1MQyw;?Ǯ_h`<拯IYT>'e9NT\??cDho祓E=J؃($4 FE+|l;|'MƔZWtոDP[{ vN+h褤1:M `Y^g4|6OW,X4Wd!fY(%?CC#adG:,ԮZxlSSAjq("ʈ{Dqk%)&[9IЧ"5$,:ONU;ΘlDq6[\p&ĀN% 29D,WXڏUFj쑍vQ):qɡ(*qŭ@%*TԒii pM6As{\<.X;CYaxxXAn )IecW UYWxI*&bym!k>GG2tri)Ftf:{eKt_LID\җϿ2j9.N_FyZ\>h2 [9kՁ\ƃ>F=. =@۱5DpHIh7fˍr/-jLK;U ]pbyV,'vg~nvuՃZ,q~MK^<,K$_̠  h$ KRgӷ˳Ajg8 ֞\+lvKVVL1RhQ&RK"c'jQ/hDvP?V cֲ4y ,;ˑ } +=#sڗM "1d&Qh)}}_V1 n|=QۂveoCs6 ҕ|`sLFǘNp!$ ) @LgMt G B厍{``"*J&.;杗qNLr°/}FnUc$$ȣ`&Gk$RRYHk"JAq@Q&!#H呾&g{ך.ӭIfJ[njvL6WOg#Wq4y*j^@mx/I/ݔ?xV?],`_GuDb42?`;(>|@Jfcj(kqUb*%YNDoh<|1k|Mߔh gmfé~{|A[{{%tQҾK,VQR&.Ǭy؋IY66-BpWomskx\Ȇtb>`iD+I09>/oH1ykh rY7'!(o/q+B>|OIj"6F)l:332YMFIȍ,w' .#$>8Ԛx+jؑ2~f1!'*$\p'z҄ML@Cvs,y#|aUEnάvv]v;Vz(@'4Fhz+҂NJ 5J1݄ Fit::ZYG-صΨ;oe\@Z])jʹ DQ8geZ2-NyqdFO2$}}$0;&*&-NkGN-@:,'syv3fC,Nq헁wl[i'd\]gݛ\MGܨ }vEEڹ9=iGn|ht(3o`LBfOY(Z ih3/g0ȂJ%*+ߵJL%b)pܭJŬH329E[mh+25DRj@!E}V]dj y#_˴<$,A.r,~˒Vgg&V[?ZRKeu]jU_y6ZdrDS;{kmڵ_ \P='V}3ɠgW^4 ͲAir*WNxT- Zօ.楘"نSjLAJ"DTRj TVn97 ?0񲟨ypn?cw/~~}*g!*s$ ,#ss;g9{#S" sPP,wɻqoZ5-rgW5OxofO 73褕!MP8RtEVMYq!\CQuOf9 6\$-x%yc4-gp̏GWoO.. 
>,n"; ǭۿis7Ko9_]+߮~e[Bv}X݆I}h2ƤV!VfS8L.؛aLa(j⹛®Ҩ~70W pJUWaS^C 0 \FUWxJ!qeݽh`z\uƎ㪫tq$x \q/^0\UW+4w\A!ZpuJ W]w Pk;J\ 2^F*bY\2 ָ/C%eq7C ]g rh\Am/{Cĕh+ъ8kWhwՊ;RBĕ~LRxxL+v О3vm̋b@p@3~LC׳?# ‚tgP榓wO.|)Dlu\x˝&Ϲ1t{{ e]}&껰8eJؕS*XG?n3 # 1ÐwW Cٛa ߏVz5j'yq'"{dԨvZpԡWV^b{pl{wW;5f\A-;ʹ]&EpRXW]㮺`G2]u/:@\gp/qTo~t2iO\ yF*!ʹn\AhRxY:D\Y W]0㮺\FUW;vח-:<\\ tf 2 y}{Ҟ6sUWɼq<5N1aqU!J'U㸫.w뮺}}*\$5x?)GZVڋzL ϲS ]{Cv19;p;mqgفE>x=Ҋb)ѻ\?j//F!xGnIu\&Wu{njeOwr2]􂫧z$\uu< Z/sUW)nJ!kU0\E4 Z5{wUpW<qtP7Y}J~v fCz˿ؿc:/߾@)!zoиx>D αſۣ߭v{AXFE'׿oywwٯ8k>{ SO|m޾ݗqLۃ7Q{3G79"?7`|%޾N/kqa9> @Qi~Bl iQ>oΟ=w)B0gwh]F=m~_>/$x U]Ob+d'|&jSQ4=@[UNLVKTU2Neg\V0~%;:IcLe e-ou~hmf`/W'gA.G[TPa4WWiC7F ]%"YseUAwI/%SL*!y.T1Cq>Tt.F7橸;%:[YmHPqQA@PBokJWulOr-m[rEZMkEj!IBdRhF )ӊ-`bVZ1!5w`Rk撴qYR<ld/ͩc%j!`N" =XzHBqشv lv}cR֭GNJ3gZN1Ż3$< ́~sT-[G@4JEyyi{2 IDUJ CfcDRh*kbiM|ˡWN5; ïIj!B*z(.^1OL%f5hiNy:$Dy%C(GR]5E'g:ͭTUYy%XVsTJrTM ׂJQ3$)ŒKTCJ Pk*=%#BBՕmJTdM\%6T" |QD.b 0i*RM#hdGBǪP7_*KI!o=eoh| fXSe[`BS<sTsnwb52~08QjpyU++fȠ-]ʍaKDhcn52M,&ڲNJеP-XϺg*) WWeS%C0ooK#)Mٰϕ*p:#d4dkkAL-ꜷ)6bsŐWHU bjg(cdC2"4pNp$0X <Q8Aj&ij< .ӑS`Ʋ]gdXeR̴P` s~%UV uԜ eLAAX[+@ C5X#, Lh<@+׸9f5eEr ukJx[b΂#n n! ``ξBL>*)ܙe|DHZ)9mkL&4_`J-07(SѝQ"O:/PaV;v&XWr՗5WT?ԂW2f=fu4/ ަI00aƫ 6OБJF1ѕ&h2<r.z_ CFL u ,,BoLAQ{Xj,am>FT LD;r`z-b IٗM,w&NmDjr!ho-jc9/{O>kw?S7vpʶ!KYM>e:_MƛR =ֻ0 ǂ+G*s;x@0W(`:|%Ԋ@Q !I DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@zJ )1I z>%WJI @ք&%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R*0tQ z6%P[;B)ҙ@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ׬2䘔@0؏L VIGj3@RRF%cI DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@"%)H DJ R@z=JSLɛ-մO/@S)yλ|#xKj>KR .ҵ̍Fn4p؎1upլWި[?UC+XH•nvDp 6Ţ7׏:\J++RctlhઙX j&;W^U3؊U3cc\BjrnegR_'+wM=|p/ڮK8]nf0}1^ϭcL~&gҌڜq6(ojcfa'.~p=lcilxVDʎYeRaKvzj:q>YyϽ.Z_-*jt1 0d1ҿ̻ nXiytY:)}^7*y !Y(G:R5)wy LsId|<5 5M裏m t@|I#7MM`}e2v*kv 5'}T˚Z-me]j;Qtک|u]-Uh$2ٳ?}3)rg~~2~V=H$RЦJFW`GWmHWZa\Bm<"TCU3R:\5+!zp% Jz4pլ}ꤾUYWWk#hઙkXYk~YIp*JsǬ\`hઙGWZH'z2+;`k1vUz,pլ}tYi++NRg;Df=fW\k\5x`cl,pլ?ެYWW|)I6 l˻ا{Ə)=[!G\)ZuYi5´< .U|(iZv9\ׯJ wxN:Km%,b3]ۮS1.;y0! {Bo LvؤVg(7Ueom>[S]+Y0.9'.^ C@}|,-7{o38r_vtNnU7riXߤ\~a6}n v ޕg=@_c~,߷gܻ"vjtv^n6z Rj|[oXv+uΉl-a+ԐfZd*ibI&px2$glq5vY_KcvJ洶o[vB뀴sצWpG˵[caP˷ /?[}}4Hݻ.}cXlsmJO0u?IJs>l9Xkv.`pv-K<̏tmff.hjߣN/J%tb_W6tgӇ:LS]喁ev" qy0`GJdhzj R:5(cPɛT`!*&̪vcE`Gގ,NUhQh0sM!Q"0JH[3OWSƤnXK3XPbA7D}^X\i?>[Dh9f\J]"t%W: yxy2`t1VżˑID%KgU,Pz.m j':AI`Y&l,[SNxRhFY6rJNRԠxHC6 &->y]+-+ly|rsx|Sr JT]t솙1|lِE0Y`ymGKIwλaoMiң~ۻE5WX -7/8Ƽ(?n^ƽ¼|U6=ͳOWv.__tq'K>k5 Rk?RM73MtiڿKR^o*h|sעM%tu29#;O,ԝ->uw[.78E&Za<|,2d@pJ .R SF%e.3 2"߃^j/+WYKdu_S~/.|Mm|_.yS7~Q*%&TQ! 2%C%k%ѧ~|3ܝ܃>\Cز-k2{e&BK!̐!c0a}7t]O{4Q4 T!˃YG:\:W1'Sc/}ه<\y 뫸u:.OztRv$`c|,٢y'+\׆Or䷓ջQ= 3~6?Ϳ3!lvݏ??KfgRy2) ;`KL녘g~gSVzw:5!N>U:]#&k,XNhxX嘃l,_?96Q͕Tl/FK>.x im<M rX=7՝Nk1Sw+A麟FJZiqm}%Uq8R/-~ң5ON.NZSe0UC;v$ûpȅ;, =BqfZG&`BC;4bh6C6g=_-)oǕ^^Bֹ5Qx٭[16HW;IQ$q=!HNGP)e/(lFόs t7hHg2$}{I_+6ۿo(`6GcIƃ@J)0\n>fE^:S>puľ=q4,3.3;;3kڤ syI]>y;l{₿w#W-{7jwR*PWu譶 bז|65=|d|[Hpp|ێ0E9J; 4@͜W&RtLC\SRQ J(}4Gm"gTN^!WR|u$biAhˏRX>K;SzrV@AjHA[c3$S)NެRf@4*QCE2H$%= f4?ZH%9:=Nˉ!TZJ40#R9KI@h CJ&vY)(΀YiE4C2zi}TC,'DWhZȜ2Z$&n%Ejp&!K qI6$9 %ZI7ΉIO6zTʹg̾)|˃:A*E!#-' \23DKS*x\RgR#%VHӄ; %x r8`sQm-a57_u1͈h1yÝ,R %Ke<Y<Fmv:?w3 i"jcPg|"sm1t0=jn H@'[ uEuHU1CFi]hݷZvΩvΩN%'k EfB+YK\e7Efr6'u,ղCB[;$c*u8xҟx>1`fƣJ)E.kHƫg %C^ UFEŬ07D$}ּ|J7U$&Bj&-2-3jkΨxTJTKIznm{i,K%miNxo{w~0Gۭk1%iCa,AYidBKa.!I(i%A&F´jjq1p)K.z8mSQ$ґ\. 72pURm#ckl^5Y5,lmg k }W.Ίdf5I-ŧSg\M* ?#'lsȽL1#hd%CQzH@Rw"Zyd({! &()Q l&ݎ9ɀu-#vk܎qYQ79uޏO(ey.ò?Tq$W$' B/?xU}l.M޾gb@~“aK<&~K28+/?|0j{\jrFr _ԔjޫGEjq 9o=^WNtK #*7xms0nbY^/ ҽ´tR0Gy{?7_^y~h,nal26ۧz?kR$MBQ/ {=ڍ75՗?z#TM-H~C_볽>R咷-h+CU1UqSEsEBs)iĝ7Z\. 
B ty ꏕ9A> ֲy,;ˁ }$-=sڗ2(E2`& >FPzu_15y)٢ӏ)ēL5ˉ_+>W,<+du1gH'8x !Z 0oxiQ6'ϣQB M?̝E.;杗qJf.0(5$*K)hYI@昬BsJaHd!F&"5rnQLC@y>Ry$GսA-n/RRj>I>SppaZᚎ\(@D]c$FHkh&i Vsvg<zBOzgݟGӦ jOu[ڟ ~ ]0Q &ɽmm/xbxhA-,je[8/0l8>ٛ.E [twR8a?¿k_q6fu5EvO2 6?jaߋ?Ӕc"Ū.y^wE/].C®_h0OXo_- nSӧ;#KFPpIϚ#WHeQ/6lGš͞>g_]!g=-C}5X\}?mD$I7[OR6,u]c^zb&<;. B'F蛡t;f`/6[Ϸy3V7 \urh`z#>JVj6C}R|@f}nuq\~x^ԋs^mЌzygQzǒ^)Vfխ+X9!vkd隲̩%ְ,V7>J{NU\C&GziOWTRFx/ ~\m*k[Uh9/Yt%Q6IQ5jf}qV7;y} sWzĸ0zWJC g}ޟ&yi5jVNE[6F*̹yLVmQTEZAIe]44iFRkjW>bO4^,?MU DEɝ-\U֖NBim*q.[窶Y>8ˆ?` Ê?>_]][ ^zJ'l6Ya-*$T-XS25`+vuK8oW3OQoI/ά۩w[Ru1NhBVk4 '(DJPJ#+ttttNĮ+2jR3 jFfg5<韁fIWԕV1EHW ]!&]!>BJ)(&+6ܒ CEWH+R jf$vtw D+wpia +vX*Bڧu>]MRWyYa&󄢾*]b,{=H66g8_3ʘi9~d5\(;:q;S|!&g}^//DYWݳUdX XԇPA3P0gEsm(6|^~sļ#c3vd uۼV_Hm]sn;+}pWo>pj-=`rda5J0 #e;agCSrt]{.GUEnFU\IW-zWzAEW@#JŮ+2j; ]i q+,v]!Iu%ZK! q+_WHiRt5E])2;:B\O h-ױ )M4B`%4]!Tt&z]!uIWԕZ+B`DW]! )ӛIZ% - q3FP SԕNZAHWDgj}$]!q )Kу,1𜋌ٙT3P9ĭc?o/` ʵjr6T>?;in7K KOdmŶkyUy++C6B!Wwzqޝvᆺxc/USLЮBJ4b&5OrUV6/ZkkP!R"KG&E\ͨH,_Ok,zsȜbdsFUK1֎46*E6 ЕIosgt2\]a&v]}tt%ҕL:\,].v]^zn㷎qѬqtvЉ3vϔ)%=;BSbbPs\ zXIC\MO= Ã^nϢwLI2FU;a~Jxdٻ.@W.Eϕqg/{,]!.'+ )OBB`) q5;ABZbRt5A]I͝]!dtcO!FdBJt5E])ti2B\ i]WH)]uU:}Wk5]! )}껚 0+֎גZtBJn&+kU pcTt< R]MQW(!]!wfL"H)S$u嬲,x.x،LeuֱQ7cbZBNL_qduXChRi4ZddP0N0tތ ùxR0L={?dk3 `N ~\]ʑތQތ]=?#+ւ*BZbR6k\ճJXŸ"+֖wAaǮ+L^[BB_Ch+ )NRRZ}N@\GEWH+R*t5A]itϦ5]! ) b']MGWqo!]'sdtt "^WHI:4 = jF Ҏ5f*Rؤ 9;̻o 3{1Am&diNPQ4J4)lR",'tÙO"_N9;/%E@CpBփW:c\3,E;MG=q9G)UJ/j?+zuɻj@ڑV UAWͶIW)z%+׍$u"j8s) q+5,v]!IWԕhJ^ h- )H4cnFZGEWH]JYueJAd#/xHmBJ)2 k q+GWHlu-!]Wi ")uj NRWIMngϘ9d\*o 8oϡ=A)D`C'q-i}f@XjOQӸT _m k )px}v>o`_|z]ZvW@8孚 g8@8ɾ{ ~7 I0{qN;G~{ o+7ckvpj(mP͞^,OxY*DR4 '•Šs$HIf )7uhU{Wvͺ.']۪UTFZ)ڦ]JLum WLT)DtUMBtd:*W DZc@JR'Qz;'i~H34,?,hϗ''Gbury]QSbeB3hW;S!pCG. zvQ|Y@Pn>\Vgur_JBUҵ 7Nօ()2 /ѭq7/x jw=ϔwvw𼩏5o~7 |?0-xKYqmiZ:iTiJ xq21({q\ٿBi>n}0b^i1I3tܙɬݤ(Z%SU=Nu ɔ\HnUK%htZ3.R͚ai|{Ssh;BC(B%I:5~&Ut(Qom΄1h)EL$${j%{(*#i Rڎ(9ٜ\?Fܔ,=5y$\,\mXCѬ53\dlr=Pr76)5RœDhY{nb1;Lf 0kD34p'-Ռ iTUiԒ{G"Z#)yz4Yti:F7)K"LCt(.e`0 3Sc,c1mFM#d m* QN!yD!8<#@#.DI?}{uk&ۜsզ:uY(/`T2ʑR(U-рOBr>}:oB<A޴\s9 Sf0j15{Hdt=cNqI>I5b'-L1Ǒ֑ ~sO fQUCPZ:[ՐR@j%!BRXoݸTYOkh^VUBɨ|W!'c#ZZal%E6 VAY0H!Q!s(Gh٥=6*˂Bф i'S. BAnQx{F3PQCm>+h'm}^qDg() %JJo*|7 Ţˋucm̭ĺY.еYX@f4giF꾃mQO\@RZ6f=akotjPѦ"jх6[ r  (ljS K'FWkVَp5TM`ܝ&EȤfC2#Y6>ⓤӒ`AV5*(*-"w@iLb oDXqvLfj! ]V۞dWNc@!7CAdܡQ lEP C$X !a@YPѮi\5<cn-$,XY_t0wTL@TCJB*)ԙH͇*Q̓u\6 d&/sAGM-V│D]`#)#͂EUPhϒ (Ez@?PiW ()fmTW ,u9~˘AU]%׽64 2+=tDRH JDA e͒`3d5>! Db$ۃB6rZQ8b!z_4y^ȠΌ>Hk~~Cw@{ۋqJlPBr|\1fPTԃ,0F!NHeCOvۙTo>3>}_~oUUz쨻 L0B6#&f3x:p<@.A/БJhIWo#f BX9Y4<#y0_(Xq=+h_̻?Cwm؞춊NSzmҊ :zsQ&'t*Dv守8b:=%_"]Eս|zo9㯇eC(O1{z*ȾgەC'ʝ:{'ow'חGzs7i=})}m_H.v'ԚhhG=mNӌ2yHZ2wK7۳/ݰlE+?bW/梽FGWw8N2~nEv}syFִ)B4*4^=aNߨa,pqٞ͛ƾ dsK~2;75(8LRm=!p23;ټ?Wt@p9`~߼riLTU+iw[)g''(⌼YPLkcshgQrִ ׬\bFKGQz9_GOzZ,|uzQܨ򾓧Z5'ЕܡcV~u 2+Zњxt(I ]@OVDW j~mpt(CztEʹVDWmX ]1\BW6c+ JHWN%u&'f=Z ]1Zezt嵎gWd@k;]uVU5{ܠh-th '0UI w= t(IW_y)QN>5Q'2Oov2:5t2F;ḸZ~A() KDf˔U_YAj_IF|zV/ӮoڷMoѴePtam-p~@E 4swI-8]Kf\iS ɮ'~sG=k?eGOTjJmnZg[ͫg}/wcp=2pFx͆I9Û)C/ܻC0|Njϫ1螌{滃oL5h'ʪL%ځ_U{|wry7no˶/ǐ;~ \0oos]pwwxcg0D9?I0m!`lf_.Hrz%Yۖ`&Y|)yUv;t,U+{qhC+Tm|=$lӈ/FyWul%z}ѫs Jz!KYe 21cuVI,%rI0u r/I1/RY2e^ !oB+f OYtO|eM}d,/XP/5R /q%^.Ţ]jB+j85iVWT,9n*[n>9#דuKmocO]QyE+*E@o9#ll<,}5H <h2YV>@f$P3P) RxˆLX Nz A  9j<%bR`D)E $X",x\ZԌv%}iqK(ĔO%Ԅ8g+JKat k)t6jMAPO ?o>AU8v}Xa97-|@f">~mU^#0*AŒk3>撯h[ ,rQNy[Qz;%54zSxyٯ٭bflf f͑v%~:팓рa ,ƘIP1/SrYd"5^(< ::\`ó}O<]MLXORZx/s# 0ɠس,$ϓ 9*MR+7=0=B@x0zqFinQwAnOor} {|`T΀rޔʕ# zt黷-ۦ ?G_ӿb.ktۼ`.[ڝk=. 
+ֺ{vHkRO+JBZGd:ڸ&D_}te y …*k:+ CJN)NHN) YwU߄$.T&UkZ9YYeUi/I 8aut:yR'_yS Ib[vJhM=c;|J0W7gkwxNyŏ\(೴ݻMə/_C~ @`.s#UAŠz\#zV&9RƇIiiRXO=xm9.(pU˱m6*+o8TT_+Hh f+fO(vsH[s[S[/b,"1٣JB[)&GDP* i'ۤ*)f&Di`\4pJpQNh ,99vIqkMOG\\9$XёR;lu[揫O77SV}ytՅEFηL-k:tUsZ#H.3K/F$q2IN'! J2o&KE_YAדhֱ=JIZ,fKWсܒF'E@#uweNBc 1!6(~+8 ʺDK3\ *RYz__i:Sʜgf= $?/A<鹎)iI4FiV02&p8fJx+΄=@]ENk-Wyj@MD`(T11A AI-5eK+|voy{?4ԙFDx/\11+_,p}m4q=LǏPQɕaowM '|]dHxw!jԧM'ŀ8(6d&8㒉+vI-/dJ\5{6 Che@aHn׹+ H?x_Vupen-Fo#0B}S>ޕ7(y۬z'zjkQ%AK`r*EM5}&aMkctMg!8~RQ]oOҽxy@f7O~^>x 6I?S so<hVخHh Wo.p1Y1%[[RzsKۚ[!湵1pTQ4eI>zlٟ;'V궾Ŷ|ꤼxĹ5-u _W^\35o&7W JkߏVw_o^_X}}3*~l4/KhFE8ZKe誡aW$oo/?X~?| x?߼k: D2X|/ݣENV޼i jm :fJ'XL!ۼt[7#V9N&ēē$<碗è S2M$nKzPN Dܠ!dL"vImpo.U=N LDަo `,l\t2f\$R,)B 3! `z6~KB%pѽiW\[<ښX5- \qippz<v g/:tq2:Ox*&>䭹ҳOqz[Hwḻp| ^ -Gi;1 pcGU3^J娴>CW3*ОWUTd :8XZDGB]X/>K;]z 8ˣQV;(uL^[cTJyQTB霹VY&Pqs`eNj' y>ZĜ,vMY$EZ{~*Ho6l(ozGz8| Ye&[b8o0^ȕ*8&xb8]́Hέ8|t1I+h:  su2Hto`,i#K̀C'ոy&nם~8Fh=\yL^4eV >g(wƭʘz3O(68 Y2p1(zM0ɵ8N :I=t]C( *G[Ĺ_7 x4m.tg/VVbo5+:fy$fiKҾJoSv@Kw+TQ3-y}$rt#"K'ΙbL7g*\4B(ݘ,#bb;D q\dK%>`leٚ8{2YҖBp kN_k7-**~\г൚;&Y\=*\DIOdU%u ώ{W=uHtlo!w6)bkg RGm*[QJ/i8{*]'N?ͺB*YQjZvީyYgyPOwu}g2a{*q_8zҜw^tߋj2z*+k;w7{q[en7?7+xp}o{RSQUaT_QQE`6. |*O(n&@")Yl襕)4leJhTȜH$FfcwAݴ.w:Q@,I#xVi΍M -lQ<:P5kAˋX@X4t:z8,8$0fL+Rj hSж5q ](Ty(~*Drj%dR)`PrE)*:>Y %R9C,O,w['Vӎ']MLXN8IQ-O&ܨ4rM" ^6ѭk G 9`)I%g(]0w$ÜsL=anyW Ϸ|ޒ.e2ґH+,Vj$Z#ɹr9ѝn4RX ,&E'rE LS2f%8J+'㭓xLEH(E1 ޶u[܏"F~i,IK(bt\<גNXQi'"'D$-tʹg̾ ANIS豀:Tǀ@-'d*^Yij DG*sl@f&"ڒ%nuNY@1}v' )zOT4# ʡEo ^X̞,(DrRƬ"8Lc'c t3iԙc{w ?=jxss[ƪNR^ꊺ*P*QZ-ewyٻ޶lW?`̤E1$}۝ & C&0jՑ%;A~O,;,˥XG"KǪS_S< ۻ}NGEhWr@D&ZsAS#aZHdQ(65 ZHdl2yc2qM;nP`t9O?9c+JMb<1`M=|RKy7[wA{EdySEzg#Hc 8uq\ AX'ا lSAٜ#Yá?hARɃ]~=M2ͫCKn+> %Wa B&yQݥWWRY}<zap !CLEp4xm0;y-p0-T74lw\u}Q{͙7o0٢at6Nj _uؤ'*"^mo\_~V4_Fxt<.U߶zAe=.H~_ZRT^ > 1Be֗LaQZc Js*Xĝ-.x@! tm]hSIGpNSa´`Gש*&5L,7$ n)z EjQ ǠoWF6 & yqU RX}7)WנO+~ڴ-۞'?*œcR Q4B*X`m "D07QqЂa'd:bxb F(i‚HQJS0 Ty\_̝ז d =&y J9 KI Ib΂wDðq$31ds71 #!&N7(5:Ijw@Fj@57XPË3(SxA`K.BNDK|\>Uዅ4Z|N}H.{ LS$x|&F|7{aΫcz/{<2zX2tie$<+*h!L&Iύ\T.Qf}__f)1XW2)]c%CΥRAx9W}~^ΒҊ2^a}nF=ʏKNς\ڕ2[mmg[1,Z ]U)~qzp`6dK?zhQ$3<5g݃| Opџ{UpX6>:PQnIoť́4}jr~H9?ܘ'y~J>r~i6g|/U۷jZc*;klJ]3Gn.HָO(ƂI)I{~}x6qfJ> ;=&qzU9-vR86uRϔLR[A 6hX҂/Rμ{gS}_CfgL2<D.Js)D` p )/ cHjU$FŒʎ^@OI%D66y̜0Wn_ +^\l7 .#%y`DQm|` 8[\yXdFGn,1dR{@.z7y+o[؎"’? 8 `Ŋ fNJrZ*lDl 6*wq##۟]_ZYt9lBVQU6`ȁMfDIEA1ՂB(xn;6+lu5%f?І{֩33nlG]@e]2wd|~@ɕm4JJO1J10WR%c6(SZ8S[;<8Q3R.(R_<:$F ܍rQpF.FZڒ-Jm|,SȈ#ۤ '=p[cZJ$\+|áGVwr?m{H?]ۦ+~]m;sT*fUB+J:ztE5tJhwJ(9  "JL.UBk/UByGWO4JjӤ.xNVR.ӣ#?Yq6TIJu@5% ֔JJQ:8H@bͯgp=2ª:}WҤiZ(_~yeGNRE0F awFQ#((j)EX9  I~}g6Wxo ]脚wV`jfyX<HYO b ezख़8{dX65&zeg@{ȷF͐,֦nkw]+`Ukn6\̌Iqٟa6+>*H_}\]`~⻃]|uGxjb{ũ׍L7iXRj_a1v*b(:bA|OpEۚcj8]l/RݦӐ ƙk,.k)gj=nwZZhe~wKVt9*=I<ƙN/*C0DGKPQie(Pvzno07J[؎ol%WHaED 0\hI0δILJR(͚UGrA; ZDl h[$BsD#h̜5 2ּO]v]gsiyW]>ئOi]%|6굸<&g鿮髏Rİή6'Y&吜NQ6Α e*T2diUujVŸn2 OFůnJn{<޶Je-:f+VbRWYZ_0Rf[WRϓs\9`Zʄ2a^!X&HSE57VTu[9v&i8fPaVfmߦv�L̸S~M «b77@.޵#E/{C|_,治7 |b Xu%$'[lI~E-+r;60[ME6b=AA34(@ ֖ol6l_ۻ66_Z}X ~'mE5𹬁FvU|1i\-KIUvU*{L'{hbm_v=hV{ʣN^B㒊M534Ij0f]s WTS9n4fXw뮃l}tJfX"(ˊvZpsfblTIVث^؄xeLTJd]}X) qx.Ȓг7r_K#r{p[`d . 
>Ka^X#dIe*Cg<25C^7y$i'O(\!텰6"e&z=t9%Vo|)o5f9&f(SuBNZC΀҈ _ 7QA40S0˜^Ld8a"A{smq{:jk?z:hsm~5ܸ}Z=/3{]s s7z t" $5 Z$Gg2й#7Es#4NEYF=h)%4 >mƠI#&m”s&J@SbF ,EDҊIz-;jV]o@A0ywTfolHBiEhI!Q A%G~TX/]:"Yp&xBm0G2AN!C&R 5:tz$qv4}&5XV:Ȭϛ;Ϗ魗63A=픵O\]7j ,OghWL45iDʦYﰐvF}Ew|%|=..{f$ED6RN ։$l)X1%i.I!ȩGbp;`>}|9XXWL*ѯr.5C F2l0F  P 69YuJ \PA\ZU#5nd-Sd)m mbZ{fyQ]~ףO>9k6*kxW >{RaI` |hI`N[IjJxj*LJʳl9w9.:>-B޸\BRu$_j!VZ4 %co7қ,mg( A, Tީt;I{t^\-p9Yt/A'x%c`٤"C($k!4$g.FtڸWAiiQUbVB3kUŦ-$ ٩]59 ؂T:߳%vM-XvoH6AjvlJִd0{- UE5(BVeU~Kc]֐J`-595UTHǡQ, 3h 6PJ "93fPDU"/X7&&L2NA*gRN,Y4yD֤8n=K٣ N,͚.9T. w"'cb͌XU4ZV2 &,hd!:4r)ľvǡP,a DXIj+[_qȰCWnNp3y? @هis+E 2KzpzB6ɚ)ɂ!}:=DV5}Mç zyj/i wYzI$J!$7L:, XZ<2e׫֠@BnYqʁ)@dzРl*XAQ21zz#gO'y\gC:d:uNjX㚩OMwP=_]koG+}~0X'7566!S"zfHHIP$TT>US]ug}#OD?Z`TJ!*X`m "D07QqΈDHܨq [`:cA(V28BZij&0OchTvKp5cOPnI4^JJ8LNJPwK9 QMe`hMop79Kh ڿw+[7aX}4ۗKG+:տXҟE9GpA)+"D+.8 `=Khe5 <`49zBOF< E(7c0 x=RJnbG/0rJqFV'uzIOn$Xt 8l9"AiK/S"}v:"X뱐h0Ax5,fNܙI_MEjd>AOG-_@hBė4F/} *B b§,eJDͽP7A;D\Y{R.L5`|PݶbNZxtidb:оɨk N;:ߴv~gBGw3lgKs}WXB$KuFɷ$H*ZFPMecr_wun)Df.*ʌ =›ϳ*:S/yuZևh{ŒB83߿,NpgtzǮm?TsS-hSs- ;Mϧ0']gzǿU|q0\v4aoaWuwFG*M\ц4"ݘ%v!9j/ݩ9d;Ju'*җԛw\TP%k(>`IGsuϡ E-\u닷oNU(XW2)]c%C` E1nSt>;h%Gefɡϟԩ~%sF5k3iQ]R~xujzMU&ru:K).Ca<ܬpsg8\^QK{hQ$j(w?Ew Tiaq\D@E' MOMNo3dwR'0M^Ŷo W!f)NAFWaTo \]-+V\RrcAPY]4i4uξ-ty;xY_ץN#3>u0$W;&N%5Ev7[l.l "9x21P И+$jҹ_\sz:U{Ѡݶwp"5[¦ P~g{b?l6WLKɸVUALfv|_qYܰģu]˜џhˑ8zq`FYԍj(xD3eAbQ($RU4R,XiK3ίqw+zht=+^vf^L%L7p{#3c ܟ"wTkK*3zI]<)wqkn6^rJfvycoa(mԷIyvM J|]Ӎ[L?f +/vR##7\W2jb=]QKo`eIxk; h^Kw; X(`椴(†Hd@̦ DF\pwh}~^,:|^;ՑrV* ʣ̰69Ɍ8((>*m6` YWS6LWpounv*k۩lnv^<)l2`%a`;ȧ'&BNoa su_ʙOXэ ax> OWgߏzJ;M4)ɵΎE%Eu*Rg ̄2:IxpmC ~Hh5Co=ȳa9Fb V0 S|~[9ɋ*5=Zԙ?7(}x_ԋy\DVojim,Gh [wMאi?\Y0Ԯd[Qix`,(U2xi23Ń8򳭓o-F zrw&ETC4s FFeQnj'J -9%dP C%N5*Cz D;^s} *vӶy@ K2ޤ%Mdj.Lzs#y[3is.Yt.٘ے1(x;$F%i KnӢń [Zca"!tDOD(~nJ`1,RcF !ȀPp1x[fLC,RO;y#nL1G Emb `;XX+?"`Dl5ߚ8S(T,ʎ7;h,X݋`>[> J7F}ʛ;-/Fa[ A!rF,dRbJ)h3 ~4{<¿FQS%~NVG.u[tugNɟ/;*ȼUVeHBpKn-"3f$Z{gme6rT%DM/UV]po}F PTpfЃA*Ԯ߂#DFxU"C}bRq+"58W%SيySd-Y7𼕜Cs1h TgsnS UNh ԅ`, ϲ/O5q%XzyKqb͞y}o(Ɔf:PԺG[BK(VXM(x} O2IJs(+ pOp+@\J:\IIQ1>++%V=+X"7pj_*IKŮU W4XY/(_EKe{S3~+÷!FXxS|5e  ~>HH XQ," Lk[S58t{UGQ|4} 2_,4wI0txSNZṺPT1X9 {)sly3:;ue=XeN|_pt0ER(.%9,_!|kiM'Kxi )^?=^r-# dRKJS 3i2 g9CLJf,S=R`l*z:RڻOzA`FO/7IK鮓$%6!⁏^T ap'[lj+vqjz\GpOY]>L0J/pݖKqRK1ճQ\kGpE@ \%q9JʝgWIJ2\BJy>Lq1t*IPW?{Iz$+f4Nj +ҾӷU?~sMqR]wc qڒkJ%(Gb$U {tW>xR0o'{ߥj_>?i&ݲoT}NtS=; gG~2Grwrp S,D{Ǎ |0/20& rܜ%Cirsܜ&7irkxFG#AR9MnD9MnN49MnN:B&7irsܜ&7irsܜ&7ir_}اxX JR/A[IZ.w=h+I9+m-) YsV;gYsV;gYET#-$t&7k# &uD%Ԗc܎&zGy>urVَB"FewrZEQN4;<]xN :zTrїat)?~ ߺn΍樆͸͇4%Pi ihKX|Tg W(\ޗ=|.%ٯV߭vw0r7u|L[>uOި]xMf5Gv뇹v63-1-rĴ*='_ϙ?N/{-*':>b_2NQie(Pf^C;S[k6[KMc-i#"DŽE`.4$I gZȤa&mu-XRNq,1waʽI1qoR$AcywɏMΆS4ן.^2qo}M,#5NAYv T?8%1nD%~ɴ!y3mm՛n^mV݂pq*UsYhf݇>.sF,B0 P20CXYL^jP\1 ȿ+\c"ҏv^-!UC8W"5Wb.NdQٽǷPy =`Zʄ2a^!X&HSE57V"FMsFV;|z|7:mfzjۥ& m6ad0??[p2YO6lR_+6/#Ρ֥A|ZHdȑ F2гpkKY’4S-Q*N\`tJYTJ-c1US2`K.-('1beȻ^Vmv<$*8ʝ7\ NŘ8P=tHbaJ3j'bƽƵKpԽhW&kC/n3J9ov}j[Cdhi Jh0t")[Lkшgjȑ_a%w4\u8U~9oꪲrX"i ICQȢqjE̠e} rcv8X?rl@,=ey,+DU/2dNcJA+U9_f2 BM'z杽ɶDDIP/A!AjttZQV0MB!`Tʡ26e6O-pNOqOpz~ne~lަώ3\/P5N1q]hmY-{ q!McRХ/#-9%,3LhXV4͋{ϩ\܇A CGOT.WpˬJd2!j-gƕ-ZJ2[1^K QmCN +E34in1ayvFy\y*ImiL.],wkp{]{?n:v( ~(vR=f,fmfˎ_X8Ϊa'h:_cpG[wjt/Jdz4%;UWc7󑮮,V JלjlX&cKغq[_[7ۻ9U~q4e-lݽm*=h}>TWNw]˜g@RUtD)t7 Ss}{^Wn-=qO\8󉨛~L U{ZA Ͳc=p Pmj8cS E`Z~\{)SJCO }8dv~s7 x>RɼNH$ BYQ^ a|@K&i$2.EJg!Bb( IyaKYrwkyo_NpPCR2ը3 UY(M[q\E /$oQJ)Z >[̦ }xfOCZZZXl\0Ȋ)E(s()uCUH>NWwy6'w>s y[M[[bǭvW'Cr}wN:ⶹgs9洔U"f/1x)m=^J aDV"v ~fhVw|~B.tc!$<5l'Yi?~9NJa?^HG0Y!+`,EtHx6\F$T<-% {K^%!f}>$$f)J&*+L*W_oPŜl6%O88~[tTZR\$vIOU e&k 0󥿽)!is-݄Y[&ި[׎|HR$Wn}d1Cf ] u}p=c $,~J*!^P$/1(GPnnPÒroTǬ` &j D̥hT fбұa{Vݾ 
v/gI~w`<cO?FVF&: *7@#[@&gLBA!ljJ$~|6HRY%#T# ugCAџC Mmx#Vm@K]ȐF}5`R$Y)&c  M\Gm];(2*HyouA28X yD"+9mxT7Mh{Fneecp ^WRdNn1EX_J2k  b5Vp2ߡ0[ͪ東!3k s\6I*ȸJ`(TJf؜5"HziY" YՍD:(*c1B1I0*= E2ڝu794]'4ߙ2n'Gb|.ջŤ̯M?![ThF$.+O60?HߗkWJs)TDJ< +NAx.s$J ::GL(tN/ #: E3` I Z5+a!\/}Ra}E4YTtﮞ_1Sф>t9:-???kիͯ Jy6,GS+r׷fpbY'5jgFizl^kuSmy77a9}%Y2pTrh뗼9{J?!F9G:FuÖg07'[VLiQ5*5trz~1gŖ>&FW6u^ZGϟ3]_ʨzMC6w W4Wv/_^ON4C .k,a[a\:'y2g4-G][-xǓOXI_߾͟~|_Ty׼˂-GG #WCכ_vt_Cx5]}0n}rǸ;,!#%Sb{[޴NROj'=ɩ0bxaC.)_(`"@ґc9*17`'pXKj?vৄCA _lU0h,dHxVlJ9 Jje_gNC9ۜ8{Tww湾۴jy-O\{#eL  z[XiBff( [Ѩ3dONtLbrHw=Ĺ[Fƙ*0 "6?ED3"Dzjhg4m@+4LZ9E3(YNRRcmEJř!GNzMȖ2^G/ }G&yO_ub\kY/.qQ 8V&Pr6^&6V{dIf-Aü'G!Zsy.>. Okv싇g<@X_6wUx[LяV{84Q?Ҿ|AF!RSBʍlixJdsz1i2#Wdb><^.&-Yiëok>{_ ں2߶Kzy j`2D/Q*Bb[P 2hIk(V%GZ{Ĺ&)6#MGP_y &dZۻ-_mVlɧ|jxD$!-/B+IJ>: f/GFR,NLg<@O' !Em#aQ2\Е׳ཷqd˖S/Jg}MؘRLC@k "A{L0J*mjѡ>C%C RԷe7N =>@*]f>ŏaj`ɆYS*>̵W͎A=R@8,a55{uq=`|Pݾӭ4chz%Y,e<qʯ&6mkyp2bwt8N6sm;6A0V6ws(JV%"J6Ax'gΖoZG?/[GZ[?GhCsUJ7o):D?ԇ>}h5i?Ɍ5X6)h!j\˙oe>7(?G&>o6^%OkB7(_lj-kg6L}Yލ5Т㏳Q !8۟k.i:9cD2|YRQMף_i7Ÿ%qv.8Ux o,mL?/2(2GntM#WU* jX/(^|֋?* 冪?{AEm rYd,FaL41oAZfDf\fϟg(*snEȕ!w.\l,dUD ǓQͼ.;>fzw}zzзkiܣF>VG^nff2ʜv |N:NgG6/n&eT6|OtS-_^իCƫhβrٟH`"˖ɰ˰O~dSxu"O䏯t67;E~Hr$8Az{/H+|>mwFjﰬְ?ȗ$$%VU&.P_,gz>+v[o[~qt-*:4L?4[VοL8/k / `Ț(]GtC7uQtK7R?|uLvxy `6ohw%5SVPa~`zbܭ6WGbi&n9 KltI1F9Q}Gq[=9eUË( K&YYR+'M#̩X}kXQR Ҏ3Vо=t9zHЎt{e@]%UjK[ E)4w̕&.߼R^kιKf vQGwyva9m= l:~DJx%F}w"k:Ɐ&}ixoB^* Dٰ [k>y=H-/>jT/VGʋxjoU|,ン aں'164Lo{4׽wņ7Y{W02i9;׿8M0wVbisty.d$#5`ϙ3go`NP%i+iTe?Ӥbu2=[ ?\XPn< #^;sWؿ.aE xw?K;qD\L'qfMY=wP JCݼkw27|ST4RֳŇ@ 4glW+jasNdaR"WLrxURR 5{ݣEky_ֲL1[ B)l'I+Na+TCT9Shpߪi ɕ*u\Jm\W v OW(3 H- 1M*iFҞ|bQ{A٤zvP6f=N.o&h묬E+="*"/@a#ϓfx4PZCQ䕈"W9X:P^Tj#tn=*yVND֍Ul^Rso|Ұ/HwΤ 6pb篝\{vjD%T:ֱsos@+X"Φ+.5c"1+!)[ HKƻBwWRJ*xNFSJu\J-\WJI@|IY HөB#:H%7z+7  HMfԂ:PƮz+; Y+F]\)RUHz+{ұCqEu:C$ײTpEjAtW@^ ) `P;uxWְTa6v>(<礕$QFKm{Ad4i1'Bwq;ƤיhUjcᓡ; O cdB0 $ \L uwI% Cgvz8gF:}{+Oyj'xvjՉک[Z \[X "!\/!NNWTvmWO+aDJ"&\\kR=Ռ*-ۜ1?V [NgJ HwE*#JpEO&o;^BZ}۩nUq~RNgdƮP=Tʡ3G\'q sɕ:\Zm+Ri:eIW(1 H*u\J=#sܥfOl'CRW}w:͘15 Αo3{iF'3ʁu|qM/0bA;Tbi"𤫚yaKrM2N/O6@k[NNdUd P',Չ->Tٻj֝ H;NJqL;jറ f*\ZẎ+R)݀J2mJB¤]\D*"TjUq  U.\Zw*ev<p4wbc* HV]4zUqe8租lN]\U"!,WVpEA'+s#+R+eqE*pC\9nBo)d2Bl*">Wrj'@5;7=F Fʝ;+^ldHoR`dވZ1;Riŀbd˰w|?v;&:LIp?v0}*$F OI;œxvr>"-՞jB+cJ)Sosa"`-53J5BNW=ĕ HWBr&ɕ6\SɵSٵ0WO+a: ɵku*R=qE*pC\)i7hi)dpErA+T+XǮH%>w*mMJAX:A˓"wWk\= rRȀbyc : P-(u\9 h1Zѱ9S<,|s4hhqmsv7]٠U$Xܗw%6놐nҮ:$AKi 9 a2>:Uɼ%o@H8bǪ+?"SOo'p‰ۨu,NposE v P6\v]T+2!\`P >#ru\J\W0nR `X2B*"BvWRW}ĕDJr+ ^B]y>qpW˴7Rl,coQ_ͽK4瘵oLk,H?[tv7&tPo藟L_bo3f78w]_u8$_\Oh,H0FiN}YgmD Ŕ6o㥼^~Ö\sٯ-*N^_ku}{NXmv-ewMߣ)j>ٚק7Su}Ɔҏe냻eQTb@ܪ`>zQ,[a7 {sk>)̕=*'3+oӼ"y3 :g9ʦ/sq6U.x)vUk^+<7{sԷ_u#I1%>tc}_w>籼$|W+l`EM*oDy` xeJPUEluҨ{SԀ(Q]{ߕQBq%+ %%~(E($&,,+uݶZH̤녽ip*iBY*+VA0:*,o+-V g@V g)sL6F#ZpC%֒,ʀ\f[qYWAlUE$Tm 9L//oХfKUu( !4YUJG̵,Г*Б+MiI4w_l! ؘ+! GkF,@NqT`A 5hNpDR_FrKyiѠ%1^b bCWVZ dR $J&EQaU!a]k+BĊV/*@elt`XXy88X0hb{J!F40P{B׳,3g\-1U0"@+/Px` 䉵AYݹUVe>VVkc%kUAU /$OW:>Uu !kmMrzE5b#q-YGeR\hZЬ2vި^ўLs [(k)Az$qc!sPVPB ׎]SPP&VqU:I=RqA11vE;"p1JU  * +5+0Ȍ̆veBCO  QA\R}2ɮ3O%Ce:|=Ʋdr2qnB AvEm%˲fH57]\?7(c)(|k(P,Lh=4v#]7cEfJWukʃ AŘQ cG !.A f|)*)ԙ5TDqš#3إ~.V!joSCAV(J892rAj ތڲFxwD"=dH_($T qj2 R]WW.#{/AZ#qݣ.RDVKd&LcrtVDlZ"5rP"ZJ( er1! aUh#ǻ=sA ԙy?/hwH3fD$' GcE( / Uiv2\@ӚCz}NmzU TA׮z|ou/ L}6=DF: xs:paяАf%SdIW=$ B+X(IT4ַiyq$Yڃj0hfU:9 6IҢGJ$mPp$JHu\mhTkM!JKBq$RVO ]j31iGr6Khް-G' 4 <%2`Crh[SP.O(7"Fh8(uP"˕,.TP=`ePˌԠ*1#K[Ƞ rwU߸YaKl+TO+ IISL1. 
n8f`T81iTcGv3*}f6ywsS;xn)v9.>&2ID\#0,r]/_i V_^?{6[Cu0U=LmG4[ a'^bpkK/!+ΠvQ֐>JC^LPk eA>h :'o'_oj=OD[\a,N{׃ =x</?՟|7r_>xFmuYZ™fx ϮftS7a+]e|x;|9ݺq2H6{tBQwss^KPC 3ADz]]*C)R=jWXeb͝`Vd,Ea%$ԬefAZrk'oUGrv Ϟ*x@XYlckƙ6Lf689ᷴV4Lvw|d%`YA>"MdYq|0VXf< a ,e6FL/,3d{膹洯5Rtwx*.((&M[M :W"/RJTkkįw՞51Zl)ۨ4[fj1c b-M0SS:crERj1 fͅ^rͥ1:PsE [DH[kX)dN>l-#~6q_F4vφԃcK}Q`1l(NAV_^qBV76{7_:o\^HbƱIGzY1h02Vp"Ft1Wx=WƂ'c0t'_pe^ ~I#4%HԡˆJQ\{tbqTJ}%w浖!q^ 1g9o@j89s=!GK@Ҍ `9h.T5e$1QvCqCʽ7Lӏuh^}f^b^L-T4Y16 95V,)j;!}l?Sx6cpu0a1hi.W6JHp2RK5ن:cy`Sɘ(3L+ؼvh6^{EV o0w:JeQa{ (ɣ% ݖ@oT+TllX*sX&PKk ⍞\Ĭ+bn}cKqRՌ{Т"kEb_oT j CzӆOBSra]<&x$ǰ'J:ej;qa=\D`]Ž`|W6m{b5[*VĊǎu᫫E#856F#~c'Azzf8CȰ& dSM4=:H7q~ŕ7׳f`E2|cq7_zp\9(NJnH >A۰lLM0(XfsҺD`12](%tlZʮkGN"*Alrj(%`wIS{qKXhNT⯶Q7˛vKQ7WufbVm뒬L=S9hd*6 =eo,;t{YG_7 [Anmut#?]iJ_F\ԯ|*Swk߲nj:떋A~u;aQں)$ZhN1*xW?ϗX B'gTG_}WPb%h3>_M5aI>Z$=1#p*hԻn^m<46>t<)lثGn2ƿ-QӔ%sB bxè`>G/7 I(CCۼ?տɭ 74X`|ʥj7)i-$$j9b_=vi] ֮>TW"c|W/,wz.0RS=х Nl(qtT\5̃jXS|KU߬^nI4мl*7fh]DҪ8g37jyv3TZ@3pV YT)t%=5ea3"kL 3<|1/(s.y)MvǭWASyx .0*gДIbk 5Q|-q1ᷱA XҌ֒ ⭶a(n'*6+V9DH: $c蕉gKO;=4;7qlD=&aSRo\ھk@`;.lGÒ-&5TC-W;Vfn*k Yiknj1Xj)68p`xN`Hl>ƬBI7OwDʓG 3HVO?ޘypG oiNHk$&7* }C& 2gpx3 =ߘ0: q|MZ>Hzx5݄DD&Ivk@$"՗N+澼9?vr4\W2Luy}tDg %zz%SHTY X" rSz(B JFaÓmti F 񌃜x7Lu3k#AQMNp82$$o11Ơ`q|OAQ(h`P8 -{$3R5qkk&5{G5A`3ҁpI5N0j0KxBCuKZ<\k^6/ 弚1]"ɼVVVɭ3NlU * Bug0)o(¼حŽ]C: #Rז2F(;"7`^K a#KBkBem]CЃxSk9H{+Iz͞$&Gi[y=`D0+eZ g vNPUIOD3FNC\#WM|わ1W7w;EQ-ŊxjؖSٜLzM#4 uJyQI&V+,!^A 4i|);Ǘh!b˝{knI] u9p6Df$5ʆSz9SUZK,3iBƑ5XG^r@$7@skmj-zpL,%osjˆ^_7ֹ_/٪_f˷~vqe}e>יgYlcٯkW=Swc ջo= 8zS^"TڤZf CM]is.q] `η}6֟@w!.>֪'mxW?*lYEoMsuwg[?&DX6&ʸBz>Àb\ٷ5{Ջo1C ڟ|#tժvp$wcV,y`/7I5]Vlnbyu5Վx&!p|5e 2| DQ),QҙBܑ-}9E >ؾz6fXXBEg;Ϡ¥IA>=IF$ʄ v_PP;ߙ{=VǿR';%mbk7HE<\~TiA!֌5gpP(=!nJ\rH#W"':͑ ^ =a+QU@@M u5nf*U |.Uِ~"9JNT#{}_7|.݆A*c6A6>P)jn{xSexwq{ :{<׳(S1$?n _mc< 1֍nBau+=/vUjf[b`wIƬR29/:*h;d7 !7^l8$}/A1;9ZR"/fq&hKgʟ$cʟ"Cpʂ15 (Or]K)?AN"dx,c=0גs!dA 1Cuk*_9pad+88n+8߷nR)yBDPF, `pqI nYKͻ1PЌ!FHUFⳟNlܟj*05hT^KY-!Q)A8li/5_%BO,=/$~y$=/䫍y"Gib|wf2#C'Q/>)~6L۟1eEQ]5$a Tmd%K3%`b>ICΆ+[,08L;ilpb1]p=Y.Wun?--aU(ha-sXJG טo*6z9"Omi zkgЃ+ۃ+d1SetbE,_QGHiS1*SccS@cx1l'#U Gv㡯q3$.D b!踡0 Sj8)00}Ǫ<|:POڱeNt vAl[ N"] FW:ӌA׉{(mcu[-]=N7 fm$4\]/xM8D!†Q.=<##Ϙ.u#D/-? [1hf=&oH+QH+ex8;c|B~@AR4,d#캃Is;l*LkD@Ņx&kB;!3 tY<4u\KkD~ƲPidӋ!*❦UŔ.KR OD֜^| h4r;u>2S,/P"/ZlZ̼ۛO7ax/|ojg}q,f8ދ@vwSy{z9[s6O,EGiҽ*4K_I/߮6݋КuP]h4.ƟGL?ֈN <=?wq¥ҁZScu䲰B &%C-UgU gUCgUW(9n*6_e~?8TװF_+^C=zAB 4쑈TN$ yq3P& Bx$h@{~ N=iblWm41a%I;5 ;3J2̑2XHo4XTǣAY1؆ :Aѣ H G9z7slF5[Hu GesO(&F)4C"v-Q.:W9o 0q[UzxF=n@ډ쥳{ %[RiH_8]ܴ%؛~F)f/$z:2C6<ᦘMqfo}PJn}щ;nwL`)謭[Sm ػ1Ai 8W ɫvCk B{cT5G؋8ڔ%tb6͞Q!U<@_qo8bF28oF緾[8g Cy>Q9J#%iv`>(A*nŸ•@PC)6+a3TTl-u"ձo |JY-鳻;)?S('~giXأlU"L] 4`c&4ŘCp,9(yt&5³0sa(9!HɩUq_5"4V3c\U)R2k>K0{n\V l[cs SN+XZxWVh["u*lS0%S)(gg(pL-R,Du.zq@/f,z RLZ Ҡ*]zK zş<{\Ddo?U@)[,!.mDl-z-|"%S}G"S-g9ڡCG8*9Q.6ÅKϐ*?cRk<19K oJB7?^i T_TL'a~bcO^UB8x_'mݲу#{s9G0V\ W׊^tl#H@lv%d I#>[S<]{BԷW(%{D81{:ixC,1o Cr :$JR.cK%Y^ofc<cs9k5(YuBZ8As1+JG,ֆ`52 b*++uŖaEdu\sHq 1MvO uer1ƀP J;'g"$";ITkq/W ]+7qgfHj ֛z՚f!%k_4lR_Gί'ՋuU \4ߐg_̪bڲ͇W/PzΕ^g[s3;[$khWDgׯ^A^,1{~+IplPb_@fLTm7/1Z_)]W\9o $Xrsa !D4[/0ӻ9hV9] X !4[`rhQ]8uێ-1}|T8WPmd6s$md:rF8[e})mF& YW<[}]mʃ"xYc7О 7cF5kX=řo/-sD.fq.]KӴk+0԰ lUnk[ѯ T_^_[<ˎ$Vݗg$ψ7.٦G@dvcE#>3%: )h%ky>D>4DE,t ktOن8p_V>OAOin`IN f(9;UgFBTxR IL,DSW$:ӘUUk7w[/ |DenڲibF-3j8fԎSJYSzn6}g 9i~! 
+c4l~q!6;64P!a܎߳z(tiތ|!OR)+꪿wN{etp lTM kMB5.Y/"*ѠF4肝;-jgi`z|XWxgH8_EsǰLSXS4Uh0 k3f,]plXK*)dKն Nd3(Lq8;em#n*~)W u|=Yփ@~u;K˅tdVwo~b\~bC~c=OFҙMk3k3~kcP  FX['C+-nգMd,ݜ .bJ紸$Ef|EKqU֥?M&aMqcjS{' #>c ).6E1#A,:-Vkʱ 0tCjBߊWezSMܼ|\~} !U ?/^_BVcq/^gq/{.yo&o*rxT^C‡o i}@vlifڌP *]'Kxw9!eO q2Ngi 6ݰJi>i7o>ێ۴Ϧi7d,)+*J17etLL!h4٨]g>Ob/ּU]Zܷ7f;Zmc9q|[sjͦZK|զe59&^eމќW4i8sϚP|=kfc84&@X(q.$*7:1 l)6>IFGSLpfAtwPmNγ1%gv36J%A>}ռ Ӧz1hIƔqMi~1mj EtLK*ap`2x=)z.SNZf8r2y P 3<| K0dLLlkP\CN/-pm^1k^(|G3])h /&_7kI;{e`#GkepRX;B(@tAAȉmU/%x$UOy_?o] `=xׄ50ƈ fG.3 y@, Aۡ*{)38hǠJT*չԣ+GT>&ZNQ0F}-YĠAZ7&;\mzu[=\Dd<}uP-`kLNRMʫŸ3< z5'Lʹ 0_Uu=4̘s0z1h9z10;q4қf^fa"@Qb @Xx?)=K R l$>VsxֶWgb`4xDk x(*;ql("o />MEcR\"f|"}*(E"aa8Yۨ/\ f>V 2áJ)khCoŋF ~-`1XpVX0YB*K'uo_^S\_*=r q2Nk) 7ϩ !om?I/mmc ^,W՛ћX*}}E89e?Xwvjb|eiజ2`I"6c&7*g뙬]jgP?$t2]B%8 JzO<#(lJZΤ* dv1 ?u 3)T#&rOz,Z}f}T*X%J)jEI[TՍ ڍoT,TV1J:1gpUSVWT.8d`,NΡSM *j,z]@' *1W%{6 CY-@,Hq}I i1H.Iq߿!% )J" lGO]]Gф`|p^eѱIbR|LHP 5ЇEͷNT!Q#kiO1ɠ 2ER@fǣ6IH=$hԡ*. 2&ü3z/_s,._;6ZfpUMRJ #:IUQ [4weJPkռ/~;0G0Să$55tbDBQB($ݠ6!V1VGV)"ACgp*.jVvPS*"sLJ`D'q6xoXn"W 愋%!$Bf }$BȀ&b55 IDh 3hK)Z6EygG7tMtNJYŝj,nPC %JdW IGPgLڂ$ѓ2V4 xPJ' J3uPkFَ*$F?EIJ.@&D.5ڡ!4' YQv8Ktе@h]#F[0*O pu@0Sb5zWO0}#+1rtqGFTN;H:hdXBXVABF5Eˮ;!r#Ln_hqVbuze%o],6؇;67yM1 ͫOޜQ|8jxGz{z~~8rr9ONOi>F022~[D}D:~~t=ˉʍç"i]%/%Y6^$7w YHklVa(Ta+ 4@4` +/1ήř:r)XS‹n]kDdxˍ$\X|A%(5qh6K.OUƲ}Ku.3Nr;d݃TIq0fE'YVzpV@LsSP#GccN=_' lcoZf+ 0爳6kKx[qi=± d*K<s0nn´-V{tx0mKt4|w[AiEV%+fE,U-Q:'ǩ1$ /#I|/jFzc>23JB^ peX =H i׼Cu]22[}#I\;;j>y.O&t= J~rt!%dF !bwRJ]5W:z"'AOM(!ʁe*^9ߋ{tWl6x7ms0D43:`I[B<1 XE,VDR@}6x[>7TuU,LFQϒT$!K3rZJXmpFW 7PEF nx*70xžbd/D)/Rw./?~jrK+wL>Oiyʘ8:;,/pW8׍bl~xybl4 9aҼֺClv@;,,F-hC,7.1ehfzhoSɢ<1ONxy'C4倃0] 9o_ц}נp?3+Kȍ~A{bc蚰/ ?7Q+^M柪02lTU@uJEtѸR)Umz{k kx=hI1>;C#Cg83\&ICtcof#rGgʑ*lK^Pш ,odlͯ<ؒ>_5.P/:sנ wqP*,bjYW\{Fz@ד.2Ü_+goߩby"ES,+`\IJB`,˝;+LLVژgc'I-E@IKwM͂3ޫ< tPrh bmX,k!W(KbBan[u~4fRey3:OQ1;j@c>N{k``>찎Ŧ[[oNԭrzn]vDwÕ_"1 5qceJ*aYIVTpJ)$$GщB⃢9-#jQ+#1qOi"q^ZzF[Ƣ`"HI!<ΖQ|e\cmӯ',k?]̧6독"G?Ts1i^^8rSSw?,tNd n^yD,OFX*w]MOY t6_uT;vpqQ~@.^O>2SNQ~3}ƨ"w/Ihke6P9iҧ1ZʓKCEݴR0 nW7h!YN\4ζ_Y>Caobl^7[r`5Z|Yͽ80C](j!J V0WA5y >%}nw(F1$^T\&TBt%CMTyNtq^)?cpg|oZj3%^q&Վ(ʨu!teLe4V 8m< J"E[@KQ11ץ$!GjNij^ZS֦$SBƜו$xõO)r깖*Had6\f ZuSs*1h m<5*Oo[yP'4J#g/裭T!FQ-xnX4Jbc3Vk $ yvjP0Aɑx+qKs$FUj dc$15c)z@G"]m2,ܧx~9R!i"B ?^Дt|+h%<-4RqLLJqגRG`1t?fFl쭤&H'ToazѩHEDAk@29ƩJFfUKU.bl0Gפ$R<S'|%*~X!2so\C+&ѾG$x^c+UyVjecPdk}=i&W6D[&i] ) ]"lftًؗvwGucl 7 >-(yG_zjvץQ|ݫЖdol{C.~͗R'HW.dkDo-kܿzQU᪷@KA|@oGWGT$ޮo2JF#PY6'}JiFtJv;) k[wAn]Hȟ\DdJ}_I#AҠ>cv@lڭ|RHKօEtUXNLLyq7ʳ*/׏=:M~rݼUXPV᳻k.*\twmI_!p U.%.% 5vNJV/#dk@ kbz^si흡f5b>x2N9 |XY;T&UJ1j)1N 6 # M!fjF@nu6.ĒaA.F* "Ym%:l/}vr]l!F| k$!:iԞskdZڐ>? ?C˱f#T7 rS`(̉[9dQ K9,.[rX|N/R4@jA GJV@ ,Fc|4Bi4hjJ7nP$o MV;C?6Y@B$V|bRLd $Ev*J0ic'fUP8:"+aK(,|E@ `qE+Y0W  b  r@Wj-QW{955/3DH7g5 3 RL%P~LVH$9XWGQ f~ (\u_ Lu V`. m"]JDc}L`VYk0k0k@Ygl jտE^J-0j>f۰6"W-ƣ+XqJGq$*|lm9J 쀬3i:TA:Xnhw 9]2hA9ٸzZԋVa,dEܷ\[2}$rEi*r !UjOS-:t hw !6zi#y)ǃ¼vSƭrI2ޡH͜O9m zq>0xj[e2Kf9$esa V8-!PwWyt,~:І c҇z?KJҨDod }~zl{XV_YNda;6ҳlgJϲ5tc‰(کQ:@}y:_.}A۷?_/ncq(wIsrހ"ٝ "i;Z$_bw~˦d@|̂ј.{$ u?ٙXS~SK8`|=/.+:V$#'gqlom066J|rE2.,ѦZ-f7N g{ z5PثUk-Sz&d@DZ{xJX&=J/m^aKb Z\ZF)L4s5 VI;Saȟ`6\ɃIRR`^Ȣh@EnAoCF:`?6(q"?7:"䶘@>Fe%(Z$n@Ե0NGz[Qː'&& a\niS^SQFkPXVJ($^LFa_'Nߜtnz|C4[– ~L7! 
^\|ʑ\$.9ɯST2&"{v1B]I͡|Bk G^O^JH` L@,8@bܡ"Dͥ|TYO{L[f о2z{Tq$b$ ->4uvwDwCM%fw@p{X@갩v՚n{:9o%&l~ i1S T,VXG:{N.7Hut'):2r 9VĜO n&4 .rf [mwNԍıKh{qma6z߮Q#1CnA) /#VdEWQS"EJZ桹ݭIqVde(zdcVpR%+t[vN[HPk(w:m=[/҂s +'`!(^2jLc(*hҞ+=JV(tG Վa΢ߝj[c zjjTk¸MDo{0BBOD,%D5's2C`2ȝhe) >j zX SJRO8!$-3&B;4-}M:g3B&1hԶQ10Mu 6i/|i-du#PoNr.o^|3˛?d,4UtMP$2Rħ,ZTO'l,!d!24rNKuN;Ii#Q܅_I>{_EG/>Ϳ&7#O]NcʉLǻU]iE;*z7?5Yfg݌g&\M>qz@}3:շl[]֕ Ȧ18 Q=%F7NK[WNQRRue_wʆGY{? >EPX( \B*|{oFUPj*z?-EhWU>T:LzЛvx0Z`dL"C[ǤmOAh0g;weI}/pQyE Z#}.7HIER'YEآEȊ8"# }wT `'+$չmeg JdoM"p<` :4z#~y0Gt?*'d7Ya WC öhцZmv}dq޺ -5 ??rlA:x&nL8i$UF:OXD I95O[DВZlLsezNۮR- yj 45&O&󐪼xDպޗ|A [l'ICP^HU$Gc+L~{!ϯ^^Euťs2:*/Ly#yfQuu*7Q2o['YbQgu;H.܄A Ndno J!ci֔TKJ c*լy| meP=6hA|Z-h!/0Ymƙuk+贽&m~s*ͺBVhݺ@ luKzo]),%2R"PXRXU|u6w(iŻZ9k_+{v5VOGz5^yaJrxr`mԠ-RS>FL2jf"y/:z^@]}7XozdC/6]l Me[L]w_쉕X_K ]ȒD!_7F1R*?^O.d99L8,w z佝CWr{^>o͕?N]%JtjG"J.C)u~M(}UA4^Qz(e V<}+olR3|Y: m.Og G^whAݲBčY|8w o[sF⚑\G0SoNgWX#{W@k܇7{8;' @s#2C9:騇 &|:|˶ˠ5*NU@9?lbnm+46lgfaJŰVVJitڃRabZu[qԥ=SJƋSwҥTßtix\|ƋsOtÄ`;= k4sgBػBwCazkU5{6\dpVkm/lρbB4&qV$\??S޶tk4]ukVOUɾϪp/ħ!z'{ u5<;6^[f6kԞGts_YV#Rq֠=N缏;G~ͻnPZ37-!jM"1SiGI%uS+mD#mCkt.{Z"T4^\Imi뇧 i_.DcO84?\]sh jUaSф5"ill-2 muޕ% 23sEg;$ɠt/01[{Y8ńz= _ra9ʰeX\|_WJA s$=x]$p$n|bؐJZ5`^{E.7j{׃EUf+g.SVW;^7_(DJ{ة5=r~]4+k 0;o< PI(ؠR\j0[" :NK7ʈ"T{殚L䒌ށD1\)k'Hh'DM b^"eB4){QbէTCݱKFzcQ,dJk vD&E-u5Jy(=M*BG.DJ1x|ׂ-LXƽqu3_qv'JitY2HI+VV[\%1B$@=?hr+bv'iGT䌍84 D[*FG)dHiϭ977Du_>u7Mrϩ6Ǧ8n)C)3q(@STR BQz( \ A0h•\ƙ]52zmyT4TzZ~xAUV |eGXE87UTŴ-_thl/ɹo%QBo&q|mC{onҍgPWa0aH &E]|QlkWT0?[SÔYoNYL0SEgaZ%&J[Ε.b0G:F񞰵eDf,m􉽶I~֕8C>FbqR!iE )\|єtVWTclHqLL҉_֖wƏ;ݾzc`QW& a2A+|PPTDA0ZA'O(UoLk[vas(r?R]㦦uW"YiIBN2JI e!rH$W; զ7^ _ξH25)EecP0G2[UkeI0Q?݋'OQ}jx|bpO 3m{D)2T1񋇌R^XݕR ]H `f7c(O3 "zZ6(S%$_RGV11[傉j*kG8g81GAdAfa 4Fp3VpЫ4ZZiZysp*զQpt:J'V YYodOPZ6iN`=G"Jʵu*TR XCD0e(=D0e(&͇RQX"+ATv\P p|( 4ǥR R^ҚjC1{:pB!Ja9卸s%<Ϡb4Z_~sVߓV"h<39ow7`C b2kM ղj9z}Uhd5;x),cP&%[zߜlH-G_d29Du>/=Mnhsu1'듓3\R=k!hC#]8mN=B(:nJA8{>=(|0|k!<3~Z6BPp4ph= C &C$,^3Y;^D-* 6–L=?sLܿ:TGKo7܁Q*?D?Wz.$^"./߮VxkHwh%g `Z AHq(Gy}var/home/core/zuul-output/logs/kubelet.log0000644000000000000000003746036315140123160017702 0ustar rootrootFeb 02 12:51:11 crc systemd[1]: Starting Kubernetes Kubelet... 
Feb 02 12:51:11 crc restorecon[4702]: Relabeled /var/lib/kubelet/config.json from system_u:object_r:unlabeled_t:s0 to system_u:object_r:container_var_lib_t:s0
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/device-plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/device-plugins/kubelet.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/volumes/kubernetes.io~configmap/nginx-conf/..2025_02_23_05_40_35.4114275528/nginx.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/22e96971 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/21c98286 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8/containers/networking-console-plugin/0f1869e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c15,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/46889d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/5b6a5969 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/setup/6c7921f5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4804f443 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/2a46b283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/a6b5573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/4f88ee5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c225,c458
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/5a4eee4b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c963
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/d1b160f5dda77d281dd8e69ec8d817f9/containers/kube-rbac-proxy-crio/cd87c521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c215,c682
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_33_42.2574241751/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/38602af4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/1483b002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/0346718b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/d3ed4ada not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/3bb473a5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/8cd075a9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/00ab4760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/containers/router/54a21c09 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c24
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/70478888 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/43802770 not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/955a0edc not reset as customized by admin to system_u:object_r:container_file_t:s0:c176,c499
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/bca2d009 not reset as customized by admin to system_u:object_r:container_file_t:s0:c140,c1009
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/37a5e44f-9a88-4405-be8a-b645485e7312/containers/network-operator/b295f9bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c589,c726
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..2025_02_23_05_21_22.3617465230/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-binary-copy/cnibincopy.sh not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..2025_02_23_05_21_22.2050650026/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes/kubernetes.io~configmap/cni-sysctl-allowlist/allowlist.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/bc46ea27 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5731fc1b not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/egress-router-binary-copy/5e1b2a3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/943f0936 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/3f764ee4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/cni-plugins/8695e3f9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/aed7aa86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/c64d7448 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/bond-cni-plugin/0ba16bd2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/207a939f not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/54aa8cdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/routeoverride-cni/1f5fa595 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/bf9c8153 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/47fba4ea not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni-bincopy/7ae55ce9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7906a268 not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/ce43fa69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/whereabouts-cni/7fc7ea3a not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/d8c38b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c203,c924
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/9ef015fb not reset as customized by admin to system_u:object_r:container_file_t:s0:c138,c778
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/containers/kube-multus-additional-cni-plugins/b9db6a41 not reset as customized by admin to system_u:object_r:container_file_t:s0:c574,c582
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/b1733d79 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/afccd338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/network-metrics-daemon/9df0a185 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/18938cf8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c476,c820
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/7ab4eb23 not reset as customized by admin to system_u:object_r:container_file_t:s0:c272,c818
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/containers/kube-rbac-proxy/56930be6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c432,c991
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_35.630010865 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..2025_02_23_05_21_35.1088506337/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes/kubernetes.io~configmap/ovnkube-config/ovnkube.conf not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/0d8e3722 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/d22b2e76 not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/kube-rbac-proxy/e036759f not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/2734c483 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/57878fe7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/3f3c2e58 not reset as customized by admin to system_u:object_r:container_file_t:s0:c89,c211
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/375bec3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c382,c850
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/containers/ovnkube-cluster-manager/7bc41e08 not reset as customized by admin to system_u:object_r:container_file_t:s0:c440,c975
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/48c7a72d not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/4b66701f not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/containers/download-server/a5a1c202 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..2025_02_23_05_21_40.3350632666/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-cert-acceptance-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/ovnkube-identity-cm/additional-pod-admission-cond.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..2025_02_23_05_21_40.1388695756 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/volumes/kubernetes.io~configmap/env-overrides/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/26f3df5b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/6d8fb21d not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/webhook/50e94777 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208473b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/ec9e08ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3b787c39 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/208eaed5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/93aa3a2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/ef543e1b-8068-4ea3-b32a-61027b32e95d/containers/approver/3c697968 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c22
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/ba950ec9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/cb5cdb37 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3b6479f0-333b-4a96-9adf-2099afdc2447/containers/network-check-target-container/f2df9827 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..2025_02_23_05_22_30.473230615/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_24_06_22_02.1904938450/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/fedaa673 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/9ca2df95 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/machine-config-operator/b2d7460e not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2207853c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/241c1c29 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/containers/kube-rbac-proxy/2d910eaf not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/..2025_02_23_05_23_49.3726007728/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/..2025_02_23_05_23_49.841175008/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/etcd-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178 not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.843437178/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/c6c0f2e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/399edc97 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8049f7cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/0cec5484 not reset as customized by admin to system_u:object_r:container_file_t:s0:c263,c871
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/312446d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c406,c828
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/containers/etcd-operator/8e56a35d not reset as customized by admin to system_u:object_r:container_file_t:s0:c84,c419
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.133159589/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/2d30ddb9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/eca8053d not reset as customized by admin to system_u:object_r:container_file_t:s0:c380,c909
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/c3a25c9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c168,c522
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/containers/kube-controller-manager-operator/b9609c22 not reset as customized by admin to system_u:object_r:container_file_t:s0:c108,c511
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/e8b0eca9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/b36a9c3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/dns-operator/38af7b07 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/ae821620 not reset as customized by admin to system_u:object_r:container_file_t:s0:c106,c418
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/baa23338 not reset as customized by admin to system_u:object_r:container_file_t:s0:c529,c711
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/containers/kube-rbac-proxy/2c534809 not reset as customized by admin to system_u:object_r:container_file_t:s0:c968,c969
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3532625537/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/59b29eae not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/c91a8e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c381
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/4d87494a not reset as customized by admin to system_u:object_r:container_file_t:s0:c442,c857
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/containers/kube-scheduler-operator-container/1e33ca63 not reset as customized by admin to system_u:object_r:container_file_t:s0:c661,c999
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/8dea7be2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d0b04a99 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/kube-rbac-proxy/d84f01e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/4109059b not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/a7258a3e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/containers/package-server-manager/05bdf2b6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/f3261b51 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/315d045e not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/5fdcf278 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/d053f757 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/containers/control-plane-machine-set-operator/c2850dc7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..2025_02_23_05_22_30.2390596521/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes/kubernetes.io~configmap/marketplace-trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fcfb0b2b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c7ac9b7d not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/fa0c0d52 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/c609b6ba not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/2be6c296 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/89a32653 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/4eb9afeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/containers/marketplace-operator/13af6efa not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/b03f9724 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/e3d105cc not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/containers/olm-operator/3aed4d83 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1906041176/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/0765fa6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/2cefc627 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/3dcc6345 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/containers/kube-storage-version-migrator-operator/365af391 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c18
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-SelfManagedHA-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12
Feb 02 12:51:11 crc restorecon[4702]:
/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-TechPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-DevPreviewNoUpgrade.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes/kubernetes.io~empty-dir/available-featuregates/featureGate-Hypershift-Default.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b1130c0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/236a5913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-api/b9432e26 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/5ddb0e3f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/986dc4fd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/8a23ff9a not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/9728ae68 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/containers/openshift-config-operator/665f31d0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c12 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1255385357/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: 
/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/..2025_02_23_05_23_57.573792656/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/service-ca-bundle/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_22_30.3254245399/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes/kubernetes.io~configmap/trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/136c9b42 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/98a1575b not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 
crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/cac69136 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/5deb77a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/containers/authentication-operator/2ae53400 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3608339744/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes/kubernetes.io~configmap/config/operator-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/e46f2326 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/dc688d3c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/3497c3cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/containers/service-ca-operator/177eb008 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: 
/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.3819292994/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/af5a2afa not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/d780cb1f not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/49b0f374 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/containers/openshift-apiserver-operator/26fbb125 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c13 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.3244779536/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/cf14125a not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/b7f86972 not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/e51d739c not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/88ba6a69 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/669a9acf not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/5cd51231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/75349ec7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/15c26839 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/45023dcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/ingress-operator/2bb66a50 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/64d03bdd not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/ab8e7ca0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/containers/kube-rbac-proxy/bb9be25f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c11 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_22_30.2034221258/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem 
not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/9a0b61d3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/d471b9d2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/containers/cluster-image-registry-operator/8cb76b8e not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/11a00840 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/ec355a92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/containers/catalog-operator/992f735e not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..2025_02_23_05_22_30.1782968797/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:11 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d59cdbbc not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/72133ff0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/c56c834c not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/d13724c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/containers/openshift-controller-manager-operator/0a498258 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c14 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa471982 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fc900d92 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/containers/machine-config-server/fa7d68da not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/4bacf9b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/424021b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/migrator/fc2e31a3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/f51eefac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/c8997f2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/containers/graceful-termination/7481f599 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..2025_02_23_05_22_49.2255460704/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes/kubernetes.io~configmap/signing-cabundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/fdafea19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/d0e1c571 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/ee398915 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/containers/service-ca-controller/682bb6b8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c22 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a3e67855 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/a989f289 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/setup/915431bd not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/7796fdab not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/dcdb5f19 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-ensure-env-vars/a3aaa88c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/5508e3e6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/160585de not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-resources-copy/e99f8da3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/8bc85570 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/a5861c91 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcdctl/84db1135 not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/9e1a6043 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/c1aba1c2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd/d55ccd6d not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/971cc9f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/8f2e3dcf not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-metrics/ceb35e9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/1c192745 not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/5209e501 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-readyz/f83de4df not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/e7b978ac not reset as customized by admin to system_u:object_r:container_file_t:s0:c294,c884 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/c64304a1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c1016 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/2139d3e2895fc6797b9c76a1b4c9886d/containers/etcd-rev/5384386b not reset as customized by admin to system_u:object_r:container_file_t:s0:c666,c920 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/cce3e3ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/multus-admission-controller/8fb75465 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/740f573e not reset as customized by admin to system_u:object_r:container_file_t:s0:c435,c756 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/containers/kube-rbac-proxy/32fd1134 not reset as customized by admin to system_u:object_r:container_file_t:s0:c268,c620 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/0a861bd3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/80363026 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/containers/serve-healthcheck-canary/bfa952a8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c19,c24 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..2025_02_23_05_33_31.2122464563/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..2025_02_23_05_33_31.333075221 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/793bf43d not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/7db1bb6e not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/kube-rbac-proxy/4f6a0368 not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/c12c7d86 not reset as customized by admin to system_u:object_r:container_file_t:s0:c381,c387 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/36c4a773 not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/4c1e98ae not reset as customized by admin to system_u:object_r:container_file_t:s0:c142,c438 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/containers/machine-approver-controller/a4c8115c not reset as customized by admin to system_u:object_r:container_file_t:s0:c129,c158 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/setup/7db1802e not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver/a008a7ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-syncer/2c836bac not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-cert-regeneration-controller/0ce62299 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-insecure-readyz/945d2457 not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/containers/kube-apiserver-check-endpoints/7d5c1dd8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c97,c980 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/3scale-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/advanced-cluster-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-broker-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq-streams-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/amq7-interconnect-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-automation-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ansible-cloud-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicast-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/apicurio-registry-3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/authorino-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aws-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bamoe-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/bpfman-operator/index.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/businessautomation-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cephcsi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cincinnati-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-kube-descheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cluster-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/compliance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/container-security-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/costmanagement-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cryostat-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datagrid/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devspaces/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/devworkspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dpu-network-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eap/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/file-integrity-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-apicurito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-console/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fuse-online/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gatekeeper-operator-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jws-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kernel-module-management-hub/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kiali-ossm/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubevirt-hyperconverged/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logic-operator-rhel8/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lvms-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mcg-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mta-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtr-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mtv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-client-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-csi-addons-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-multicluster-orchestrator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odf-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odr-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/bundle-v1.15.0.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/channel.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-cert-manager-operator/package.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-custom-metrics-autoscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-pipelines-operator-rh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-secondary-scheduler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-bridge-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/quay-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/recipe/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-camel-k/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/red-hat-hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redhat-oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rh-service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhacs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhbk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhdh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhods-prometheus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhpam-kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhsso-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rook-ceph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/run-once-duration-override-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sandboxed-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/security-profiles-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/serverless-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-registry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/servicemeshoperator3/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/submariner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tang-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustee-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volsync-product/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/catalog/web-terminal/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/bc8d0691 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/6b76097a not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-utilities/34d1af30 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/312ba61c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/645d5dd1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/extract-content/16e825f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/4cf51fc9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/2a23d348 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/containers/registry-server/075dbd49 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/..2025_02_24_06_09_13.3521195566/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes/kubernetes.io~configmap/serviceca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/dd585ddd not reset as customized by admin to system_u:object_r:container_file_t:s0:c377,c642
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/17ebd0ab not reset as customized by admin to system_u:object_r:container_file_t:s0:c338,c343
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/containers/node-ca/005579f4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c842,c986
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_23_05_23_11.449897510/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_23_05_23_11.1287037894 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..2025_02_23_05_23_11.1301053334/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes/kubernetes.io~configmap/audit-policies/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/bf5f3b9c not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/af276eb7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/fix-audit-permissions/ea28e322 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/692e6683 not reset as customized by admin to system_u:object_r:container_file_t:s0:c49,c263
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/871746a7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c701
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/containers/oauth-apiserver/4eb2e958 not reset as customized by admin to system_u:object_r:container_file_t:s0:c764,c897
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..2025_02_24_06_09_06.2875086261/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/console-config/console-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_09_06.286118152/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..2025_02_24_06_09_06.3865795478/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/oauth-serving-cert/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..2025_02_24_06_09_06.584414814/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/ca9b62da not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/containers/console/0edd6fce not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c25
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.2406383837/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.openshift-global-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/config/openshift-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.1071801880/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877 not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..2025_02_24_06_20_07.2494444877/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes/kubernetes.io~configmap/proxy-ca-bundles/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/containers/controller-manager/89b4555f not reset as customized by admin to system_u:object_r:container_file_t:s0:c14,c22
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..2025_02_23_05_23_22.4071100442/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972
Feb 02 12:51:12 crc restorecon[4702]:
/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes/kubernetes.io~configmap/config-volume/Corefile not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/655fcd71 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/0d43c002 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/dns/e68efd17 not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/9acf9b65 not reset as customized by admin to system_u:object_r:container_file_t:s0:c457,c841 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/5ae3ff11 not reset as customized by admin to system_u:object_r:container_file_t:s0:c55,c1022 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/containers/kube-rbac-proxy/1e59206a not reset as customized by admin to system_u:object_r:container_file_t:s0:c466,c972 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/27af16d1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c304,c1017 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/7918e729 not reset as customized by admin to system_u:object_r:container_file_t:s0:c853,c893 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/containers/dns-node-resolver/5d976d0e not reset as customized by admin to system_u:object_r:container_file_t:s0:c585,c981 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..2025_02_23_05_38_56.1112187283/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/config/controller-config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_23_05_38_56.2839772658/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes/kubernetes.io~configmap/trusted-ca/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/d7f55cbb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/f0812073 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/1a56cbeb not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/7fdd437e not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/containers/console-operator/cdfb5652 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c25 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..2025_02_24_06_17_29.3844392896/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/etcd-serving-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..2025_02_24_06_17_29.848549803/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..2025_02_24_06_17_29.780046231/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/audit/policy.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..2025_02_24_06_17_29.2926008347/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/image-import-ca/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..2025_02_24_06_17_29.2729721485/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes/kubernetes.io~configmap/trusted-ca-bundle/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/fix-audit-permissions/fb93119e not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver/f1e8fc0e not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/containers/openshift-apiserver-check-endpoints/218511f3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c336,c787 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes/kubernetes.io~empty-dir/tmpfs/k8s-webhook-server/serving-certs not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/ca8af7b3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/72cc8a75 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/containers/packageserver/6e8a3760 not reset as customized by admin to system_u:object_r:container_file_t:s0:c12,c18 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..2025_02_23_05_27_30.557428972/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes/kubernetes.io~configmap/service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4c3455c0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/2278acb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/4b453e4f not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/containers/cluster-version-operator/3ec09bda not reset as customized by admin to system_u:object_r:container_file_t:s0:c5,c6 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..2025_02_24_06_25_03.422633132/anchors/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/trusted-ca/anchors not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..2025_02_24_06_25_03.3594477318/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/image-registry.openshift-image-registry.svc.cluster.local..5000 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~configmap/registry-certificates/default-route-openshift-image-registry.apps-crc.testing not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/edk2/cacerts.bin not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/java/cacerts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/openssl/ca-bundle.trust.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/tls-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/email-ca-bundle.pem not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/objsign-ca-bundle.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2ae6433e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fde84897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75680d2e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/openshift-service-serving-signer_1740288168.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/facfc4fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f5a969c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CFCA_EV_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9ef4a08a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ingress-operator_1740288202.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2f332aed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/248c8271.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d10a21f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ACCVRAIZ1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a94d09e5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c9a4d3b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40193066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd8c0d63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b936d1c6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CA_Disig_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4fd49c6c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AC_RAIZ_FNMT-RCM_SERVIDORES_SEGUROS.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b81b93f0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f9a69fa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b30d5fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ANF_Secure_Server_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b433981b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93851c9e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9282e51c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7dd1bc4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Actalis_Authentication_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/930ac5d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f47b495.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e113c810.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5931b5bc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Commercial.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2b349938.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e48193cf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/302904dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a716d4ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Networking.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/93bc0acc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/86212b19.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certigna_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b727005e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbc54cab.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f51bb24c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c28a8a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AffirmTrust_Premium_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9c8dfbd4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ccc52f49.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cb1c3204.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ce5e74ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd08c599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6d41d539.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb5fa911.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e35234b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8cb5ee0f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a7c655d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f8fc53da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Amazon_Root_CA_4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/de6d66f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d41b5e2a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/41a3f684.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1df5a75f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_2011.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e36a6752.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b872f2b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9576d26b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/228f89db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_ECC_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fb717492.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d21b73c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b1b94ef.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/595e996b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Atos_TrustedRoot_Root_CA_RSA_TLS_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b46e03d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/128f4b91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_3_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81f2d2b1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Autoridad_de_Certificacion_Firmaprofesional_CIF_A62634068.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3bde41ac.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d16a5865.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_EC-384_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0179095f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ffa7f1eb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9482e63a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4dae3dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/BJCA_Global_Root_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e359ba6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7e067d03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/95aff9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7746a63.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Baltimore_CyberTrust_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/653b494a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3ad48a91.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Buypass_Class_2_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/54657681.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/82223c44.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8de2f56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2d9dafe4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d96b65e2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee64a828.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/40547a79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5a3f0ff8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a780d93.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/34d996fb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/eed8c118.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/89c02a45.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b1159c4c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/COMODO_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d6325660.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d4c339cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8312c4c1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certainly_Root_E1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8508e720.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5fdd185d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48bec511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/69105f4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0b9bc432.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Certum_Trusted_Network_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/32888f65.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b03dec0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/219d9499.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_ECC_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5acf816d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbf06781.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-01.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc99f41e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/CommScope_Public_Trust_RSA_Root-02.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/AAA_Certificate_Services.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/985c1f52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8794b4e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_BR_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e7c037b4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ef954a4e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_EV_Root_CA_1_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2add47b6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/90c5a3c8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0f3e76e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/53a1b57a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/D-TRUST_Root_Class_3_CA_2_EV_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5ad8a5d6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/68dd7389.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d04f354.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d6437c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/062cdee6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bd43e1dd.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Assured_ID_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7f3d5d1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c491639e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3513523f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/399e7759.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/feffd413.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d18e9066.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/607986c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c90bc37d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1b0f7e5c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e08bfd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Global_Root_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dd8e9d41.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed39abd0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a3418fda.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bc3f2570.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_High_Assurance_EV_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/244b5494.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/81b9768f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4be590e0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_ECC_P384_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9846683b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/252252d2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e8e7201.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_TLS_RSA4096_Root_G5.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d52c538d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c44cc0c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/DigiCert_Trusted_Root_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/75d1b2ed.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a2c66da8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ecccd8db.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust.net_Certification_Authority__2048_.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/aee5f10d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3e7271e8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0e59380.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4c3982f2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b99d060.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf64f35b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0a775a30.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/002c0b4f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cc450945.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_EC1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/106f3e4d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b3fb433b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GlobalSign.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4042bcee.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/02265526.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/455f1b52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0d69c7e1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9f727ac7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Entrust_Root_Certification_Authority_-_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5e98733a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0cd152c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dc4d6a89.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6187b673.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/FIRMAPROFESIONAL_CA_ROOT-A_WEB.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ba8887ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/068570d1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f081611a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/48a195d8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GDCA_TrustAUTH_R5_ROOT.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f6fa695.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab59055e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b92fd57f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GLOBALTRUST_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fa5da96b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ec40989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7719f463.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/GTS_Root_R1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1001acf7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f013ecaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/626dceaf.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c559d742.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1d3472b9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9479c8c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a81e292b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4bfab552.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_E46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Go_Daddy_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e071171e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/57bcb2da.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_ECC_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ab5346f4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5046c355.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HARICA_TLS_RSA_Root_CA_2021.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/865fbdf9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da0cfd1d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/85cde254.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_ECC_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cbb3f32b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureSign_RootCA11.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hellenic_Academic_and_Research_Institutions_RootCA_2015.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5860aaa6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/31188b5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/HiPKI_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c7f1359b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f15c80c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Hongkong_Post_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/09789157.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ISRG_Root_X2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/18856ac4.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e09d511.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Commercial_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cf701eeb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d06393bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/IdenTrust_Public_Sector_Root_CA_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/10531352.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Izenpe.com.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SecureTrust_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b0ed035a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsec_e-Szigno_Root_CA_2009.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8160b96c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e8651083.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2c63f966.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_ECC_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d89cda1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/01419da9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_RSA_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7a5b843.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Microsoft_RSA_Root_Certificate_Authority_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bf53fb88.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9591a472.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3afde786.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Gold_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NAVER_Global_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3fb36b73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d39b0a2c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a89d74c2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/cd58d51e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b7db1890.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/NetLock_Arany__Class_Gold__F__tan__s__tv__ny.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/988a38cb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/60afe812.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f39fc864.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5443e9e3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GB_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e73d606e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dfc0fe80.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b66938e9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1e1eab7c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/OISTE_WISeKey_Global_Root_GC_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/773e07ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c899c73.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d59297b8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16 Feb 02 12:51:12 crc restorecon[4702]: 
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ddcda989.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_1_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/749e9e03.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/52b525c7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_RootCA3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d7e8dc79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a819ef2.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/08063a00.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6b483515.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_2_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/064e0aa9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1f58a078.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6f7454b3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7fa05551.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76faf6c0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9339512a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f387163d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee37c333.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/QuoVadis_Root_CA_3_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e18bfb83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e442e424.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fe8a2cd8.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/23f4c490.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5cd81ad7.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f0c70a8d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7892ad52.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SZAFIR_ROOT_CA2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4f316efb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_EV_Root_Certification_Authority_RSA_R2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/06dc52d5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/583d0756.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Sectigo_Public_Server_Authentication_Root_R46.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_ECC.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0bf05006.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/88950faa.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9046744a.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/3c860d51.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_Root_Certification_Authority_RSA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/6fa5da56.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/33ee480d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Secure_Global_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/63a2c897.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SSL.com_TLS_ECC_Root_CA_2022.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/bdacca6f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ff34af3f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/dbff3a01.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Security_Communication_ECC_RootCA1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_C1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Class_2_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/406c9bb1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_C3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Starfield_Services_Root_Certificate_Authority_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/SwissSign_Silver_CA_-_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/99e1b953.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/T-TeleSec_GlobalRoot_Class_3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/14bc7599.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TUBITAK_Kamu_SM_SSL_Kok_Sertifikasi_-_Surum_1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Global_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/7a3adc42.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TWCA_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f459871d.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_ECC_Root_2020.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_Root_CA_-_G1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telekom_Security_TLS_RSA_Root_2023.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TeliaSonera_Root_CA_v1.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Telia_Root_CA_v2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8f103249.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f058632f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-certificates.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9bf03295.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/98aaf404.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TrustAsia_Global_Root_CA_G4.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1cef98f5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/073bfcc5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/2923b3f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f249de83.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/edcbddb5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/emSign_ECC_Root_CA_-_G3.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P256_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9b5697b0.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/1ae85e5e.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/b74d2bd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/Trustwave_Global_ECC_P384_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/d887a5bb.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9aef356c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/TunTrust_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fd64f3fc.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e13665f9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Extended_Validation_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/0f5dc4f3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/da7377f6.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/UCA_Global_G2_Root.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/c01eb047.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/304d27c3.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ed858448.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_ECC_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/f30dd6ad.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/04f60c28.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/vTrus_ECC_Root_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/USERTrust_RSA_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/fc5a8f99.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/35105088.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ee532fd5.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/XRamp_Global_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/706f604c.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/76579174.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/8d86cdd1.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/882de061.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/certSIGN_ROOT_CA_G2.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/5f618aec.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/a9d40e02.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e-Szigno_Root_CA_2017.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/e868b802.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/83e9984f.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ePKI_Root_Certification_Authority.pem not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/ca6e4ad9.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/9d6523ce.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/4b718d9b.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes/kubernetes.io~empty-dir/ca-trust-extracted/pem/directory-hash/869fbf79.0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/containers/registry/f8d22bdb not reset as customized by admin to system_u:object_r:container_file_t:s0:c10,c16
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/6e8bbfac not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/54dd7996 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator/a4f1bb05 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/207129da not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/c1df39e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/containers/cluster-samples-operator-watch/15b8f1cd not reset as customized by admin to system_u:object_r:container_file_t:s0:c9,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3523263858/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..2025_02_23_05_27_49.3256605594/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes/kubernetes.io~configmap/images/images.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/77bd6913 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/2382c1b1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/kube-rbac-proxy/704ce128 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/70d16fe0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/bfb95535 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/containers/machine-api-operator/57a8e8e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c0,c15
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..2025_02_23_05_27_49.3413793711/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/1b9d3e5e not reset as customized by admin to system_u:object_r:container_file_t:s0:c107,c917
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/fddb173c not reset as customized by admin to system_u:object_r:container_file_t:s0:c202,c983
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/containers/kube-apiserver-operator/95d3c6c4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c219,c404
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/bfb5fff5 not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/2aef40aa not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/9d751cbb-f2e2-430d-9754-c882a5e924a5/containers/check-endpoints/c0391cad not reset as customized by admin to system_u:object_r:container_file_t:s0:c20,c21
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/1119e69d not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/660608b4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager/8220bd53 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/85f99d5c not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/cluster-policy-controller/4b0225f6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/9c2a3394 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-cert-syncer/e820b243 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/1ca52ea0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c776,c1007
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/f614b9022728cf315e60c057852e563e/containers/kube-controller-manager-recovery-controller/e6988e45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c214,c928
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes/kubernetes.io~configmap/mcc-auth-proxy-config/..2025_02_24_06_09_21.2517297950/config-file.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/6655f00b not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/98bc3986 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/machine-config-controller/08e3458a not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/2a191cb0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/6c4eeefb not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/containers/kube-rbac-proxy/f61a549c not reset as customized by admin to system_u:object_r:container_file_t:s0:c4,c17
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/24891863 not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/hostpath-provisioner/fbdfd89c not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/9b63b3bc not reset as customized by admin to system_u:object_r:container_file_t:s0:c37,c572
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/liveness-probe/8acde6d6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/node-driver-registrar/59ecbba3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/containers/csi-provisioner/685d4be3 not reset as customized by admin to system_u:object_r:container_file_t:s0:c318,c553
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..2025_02_24_06_20_07.341639300/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/config.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.client-ca.configmap not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/config/openshift-route-controller-manager.serving-cert.secret not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851 not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..2025_02_24_06_20_07.2950937851/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes/kubernetes.io~configmap/client-ca/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/containers/route-controller-manager/feaea55e not reset as customized by admin to system_u:object_r:container_file_t:s0:c2,c23
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abinitio-runtime-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/accuknox-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aci-containers-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/airlock-microgateway/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ako-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/alloy/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anchore-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-cloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/appdynamics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aqua-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cass-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-dcap-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ccm-node-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cfm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cilium-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloud-native-postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudera-streams-messaging-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudnative-pg/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cnfv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/conjur-follower-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/coroot-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cte-k8s-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dell-csm-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-deploy-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/digitalai-release-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edb-hcp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/elasticsearch-eck-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/federatorai-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fujitsu-enterprise-postgres-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/function-mesh/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/harness-gitops-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hcp-terraform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hpe-ezmeral-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-application-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-directory-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-dr-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-licensing-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infoscale-sds-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infrastructure-asset-orchestrator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-device-plugins-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/intel-kubernetes-power-manager/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-openshift-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8s-triliovault/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-ati-updates/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-framework/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-ingress/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-licensing/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-kcos-sso/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-load-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-loadcore-agents/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nats-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-nimbusmosaic-dusim/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-rest-api-browser-v1/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-appsec/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-db/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-diagnostics/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-logging/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-migration/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-msg-broker/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-notifications/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-stats-dashboards/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-storage/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-test-core/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-wap-ui/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keysight-websocket-service/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kong-gateway-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubearmor-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lenovo-locd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memcached-operator-ogaye/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/memory-machine-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-enterprise/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netapp-spark-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-adm-agent-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netscaler-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-repository-ha-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nginx-ingress-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nim-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxiq-operator-certified/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
Feb 02 12:51:12 crc restorecon[4702]: the following paths were not reset because their contexts had been
customized by admin to system_u:object_r:container_file_t:s0:c7,c13:

under /var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d:
  - volumes/kubernetes.io~empty-dir/catalog-content/catalog/nxrm-operator-certified/catalog.json
  - volumes/kubernetes.io~empty-dir/catalog-content/catalog/<name> and <name>/catalog.json for each of:
    odigos-operator, open-liberty-certified, openshiftartifactoryha-operator, openshiftxray-operator,
    operator-certification-operator, ovms-operator, pachyderm-operator, pmem-csi-operator-os,
    portworx-certified, prometurbo-certified, pubsubplus-eventbroker-operator, redis-enterprise-operator-cert,
    runtime-component-operator-certified, runtime-fabric-operator, sanstoragecsi-operator-bundle,
    silicom-sts-operator, smilecdr-operator, sriov-fec, stackable-commons-operator,
    stackable-zookeeper-operator, t8c-certified, t8c-tsc-client-certified, tawon-operator, tigera-operator,
    timemachine-operator, vault-secrets-operator, vcp-operator, webotx-operator, xcrypt-operator,
    zabbix-operator-certified
  - volumes/kubernetes.io~empty-dir/catalog-content/cache, plus pogreb.v1, pogreb.v1/db,
    pogreb.v1/db/00000-1.psg, pogreb.v1/db/00000-1.psg.pmt, pogreb.v1/db/db.pmt, pogreb.v1/db/index.pmt,
    pogreb.v1/db/main.pix, pogreb.v1/db/overflow.pix, and pogreb.v1/digest beneath it
  - volumes/kubernetes.io~empty-dir/utilities and utilities/copy-content
  - etc-hosts
  - containers/extract-utilities/63709497, containers/extract-utilities/d966b7fd,
    containers/extract-utilities/f5773757
  - containers/extract-content/81c9edb9, containers/extract-content/57bf57ee,
    containers/extract-content/86f5e6aa
  - containers/registry-server/0aabe31d, containers/registry-server/d2af85c2,
    containers/registry-server/09d157d9

under /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803:
  - volumes/kubernetes.io~empty-dir/catalog-content, plus cache, cache/pogreb.v1, cache/pogreb.v1/db,
    cache/pogreb.v1/db/00000-1.psg, cache/pogreb.v1/db/00000-1.psg.pmt, cache/pogreb.v1/db/db.pmt,
    cache/pogreb.v1/db/index.pmt, cache/pogreb.v1/db/main.pix, cache/pogreb.v1/db/overflow.pix,
    cache/pogreb.v1/digest, and catalog beneath it
  - volumes/kubernetes.io~empty-dir/catalog-content/catalog/<name> and <name>/catalog.json for each of:
    3scale-community-operator, ack-acm-controller, ack-acmpca-controller, ack-apigateway-controller,
    ack-apigatewayv2-controller, ack-applicationautoscaling-controller, ack-athena-controller,
    ack-cloudfront-controller, ack-cloudtrail-controller, ack-cloudwatch-controller,
    ack-cloudwatchlogs-controller, ack-documentdb-controller, ack-dynamodb-controller, ack-ec2-controller,
    ack-ecr-controller, ack-ecs-controller, ack-efs-controller, ack-eks-controller, ack-elasticache-controller,
    ack-elbv2-controller, ack-emrcontainers-controller, ack-eventbridge-controller, ack-iam-controller,
    ack-kafka-controller, ack-keyspaces-controller, ack-kinesis-controller, ack-kms-controller,
    ack-lambda-controller, ack-memorydb-controller, ack-mq-controller, ack-networkfirewall-controller,
    ack-opensearchservice-controller, ack-organizations-controller, ack-pipes-controller,
    ack-prometheusservice-controller, ack-rds-controller, ack-recyclebin-controller, ack-route53-controller,
    ack-route53resolver-controller, ack-s3-controller, ack-sagemaker-controller, ack-secretsmanager-controller,
    ack-ses-controller, ack-sfn-controller, ack-sns-controller, ack-sqs-controller, ack-ssm-controller,
    ack-wafv2-controller, aerospike-kubernetes-operator, airflow-helm-operator, alloydb-omni-operator,
    alvearie-imaging-ingestion, amd-gpu-operator, analytics-operator, annotationlab,
    apicast-community-operator, apicurio-api-controller, apicurio-registry, apicurito,
    apimatic-kubernetes-operator, application-services-metering-operator, aqua, argocd-operator,
    assisted-service-operator, authorino-operator, automotive-infra, aws-efs-operator,
    awss3-operator-registry, azure-service-operator, beegfs-csi-driver-operator, bpfman-operator, camel-k,
    camel-karavan-operator, cass-operator-community, cert-manager, cert-utils-operator, cluster-aas-operator,
    cluster-impairment-operator, cluster-manager, cockroachdb, codeflare-operator,
    community-kubevirt-hyperconverged, community-trivy-operator, community-windows-machine-config-operator,
    customized-user-remediation, cxl-operator, dapr-kubernetes-operator, datadog-operator,
    datatrucker-operator, dbaas-operator, debezium-operator, dell-csm-operator,
    deployment-validation-operator, devopsinabox
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-amlen-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eclipse-che/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ecr-secret-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/edp-keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eginnovations-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/egressip-ipam-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ember-csi-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/etcd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/eventing-kogito/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/external-secrets-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/falcon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation not reset as customized by 
admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fence-agents-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flink-kubernetes-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k8gb/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/fossul-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/github-arc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitops-primer/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/gitwebhook-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/global-load-balancer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/grafana-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/group-sync-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hawtio-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hazelcast-platform-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hedvig-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hive-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/horreum-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/hyperfoil-bundle/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-block-csi-operator-community/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-security-verify-access-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibm-spectrum-scale-csi-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ibmcloud-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/infinispan/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/integrity-shield-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ipfs-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/istio-workspace-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/jaeger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kaoto-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 
12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keda/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keepalived-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/keycloak-permissions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/klusterlet/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kogito-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/koku-metrics-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/konveyor-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/korrel8r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kuadrant-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kube-green/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubecost/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubernetes-imagepuller-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/l5-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/layer7-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lbconfig-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/lib-bucket-provisioner/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/limitador-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/logging-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-helm-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/loki-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/machine-deletion-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mariadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marin3r/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mercury-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/microcks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-atlas-kubernetes/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/mongodb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/move2kube-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multi-nic-cni-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-global-hub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/multicluster-operators-subscription/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/must-gather-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/namespace-configuration-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ncn-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ndmspc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/netobserv-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-community-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nexus-operator-m88i/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nfs-provisioner-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nlp-server/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-discovery-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-healthcheck-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/node-maintenance-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/nsm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oadp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/observability-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/oci-ccm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ocm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/odoo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opendatahub-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openebs/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-nfd-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-node-upgrade-mutex-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/openshift-qiskit-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/opentelemetry-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patch-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/patterns-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pcc-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pelorus-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/percona-xtradb-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/portworx-essentials/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/postgresql/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/proactive-node-scaling-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/project-quay/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus/catalog.json not reset as customized by admin 
to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometheus-exporter-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/prometurbo/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pubsubplus-eventbroker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pulp-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-cluster-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rabbitmq-messaging-topology-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/reportportal-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/resource-locker-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/rhoas-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ripsaw/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sailoperator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-commerce-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-data-intelligence-observer-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sap-hana-express-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/self-node-remediation/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/service-binding-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/shipwright-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sigstore-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/silicom-sts-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/skupper-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snapscheduler/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/snyk-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/socmmd/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonar-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosivio/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc 
restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sonataflow-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/sosreport-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/spark-helm-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/special-resource-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/stolostron-engine/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/strimzi-kafka-operator/catalog.json not reset as 
customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/syndesis/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tagger/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tempo-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tf-controller/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/tidb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trident-operator/catalog.json not reset as customized by admin to 
system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/trustify-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ucs-ci-solutions-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/universal-crossplane/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/varnish-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vault-config-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/verticadb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/volume-expander-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/wandb-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/windup-operator/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yaks/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c0fe7256 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/c30319e4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-utilities/e6b1dd45 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/2bb643f0 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/920de426 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/extract-content/70fa1e87 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/a1c12a2f not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/9442e6c7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/containers/registry-server/5b45ec72 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/abot-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aerospike-kubernetes-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/aikit-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzo-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzograph-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/anzounstructured-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cloudbees-ci-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/cockroachdb-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/crunchy-postgres-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/datadog-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/dynatrace-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/entando-k8s-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/flux/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/instana-agent-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/iomesh-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/joget-dx8-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/k10-kasten-operator-term-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubemq-operator-marketplace-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/kubeturbo-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/linstor-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/marketplace-games-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/model-builder-for-vision-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/neuvector-certified-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/ovms-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/pachyderm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/redis-enterprise-operator-cert-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/seldon-deploy-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-paygo-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/starburst-enterprise-helm-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/t8c-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/timemachine-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/vfunction-server-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/xcrypt-operator-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13 Feb 02 12:51:12 crc restorecon[4702]: 
/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/yugabyte-platform-operator-bundle-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/catalog/zabbix-operator-certified-rhmp/catalog.json not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/00000-1.psg.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/db.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/index.pmt not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/main.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/db/overflow.pix not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/catalog-content/cache/pogreb.v1/digest not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes/kubernetes.io~empty-dir/utilities/copy-content not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/3c9f3a59 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/1091c11b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-utilities/9a6821c6 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/ec0c35e2 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/517f37e7 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/extract-content/6214fe78 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/ba189c8b not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/351e4f31 not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/containers/registry-server/c0f219ff not reset as customized by admin to system_u:object_r:container_file_t:s0:c7,c13
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/8069f607 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/559c3d82 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/wait-for-host-port/605ad488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/148df488 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/3bf6dcb4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler/022a2feb not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/938c3924 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/729fe23e not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-cert-syncer/1fd5cbd4 not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/a96697e1 not reset as customized by admin to system_u:object_r:container_file_t:s0:c378,c723
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/e155ddca not reset as customized by admin to system_u:object_r:container_file_t:s0:c133,c223
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/3dcd261975c3d6b9a6ad6367fd4facd3/containers/kube-scheduler-recovery-controller/10dd0e0f not reset as customized by admin to system_u:object_r:container_file_t:s0:c247,c522
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..2025_02_24_06_09_35.3018472960/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-trusted-ca-bundle/ca-bundle.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
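[editor's note] The restorecon pass above and below compares each file's on-disk SELinux label against the policy's expected context and reports paths whose admin-customized labels it will not reset. A minimal, Linux-only Go sketch of that check, reading the security.selinux extended attribute; the path is illustrative (taken from the entries in this log), and this is in no way restorecon's actual implementation:

    // selinux_label.go - print the SELinux label restorecon compares against
    // the expected file context. Linux-only sketch, not restorecon itself.
    package main

    import (
        "fmt"
        "os"
        "strings"
        "syscall"
    )

    func main() {
        path := "/var/lib/kubelet/plugins" // illustrative path from this log
        buf := make([]byte, 256)
        n, err := syscall.Getxattr(path, "security.selinux", buf)
        if err != nil {
            fmt.Fprintln(os.Stderr, "getxattr:", err)
            os.Exit(1)
        }
        // The attribute is NUL-terminated, e.g.
        // system_u:object_r:container_file_t:s0:c7,c13
        label := strings.TrimRight(string(buf[:n]), "\x00")
        fmt.Printf("%s -> %s\n", path, label)
    }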
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..2025_02_24_06_09_35.4262376737/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/audit-policies/audit.yaml not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..2025_02_24_06_09_35.2630275752/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-cliconfig/v4-0-config-system-cliconfig not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..2025_02_24_06_09_35.2376963788/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/..data not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes/kubernetes.io~configmap/v4-0-config-system-service-ca/service-ca.crt not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/etc-hosts not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/6f2c8392 not reset as customized by admin to system_u:object_r:container_file_t:s0:c267,c588
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/containers/oauth-openshift/bd241ad9 not reset as customized by admin to system_u:object_r:container_file_t:s0:c682,c947
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/csi-hostpath not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/csi-hostpath/csi.sock not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983 not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/vol_data.json not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: /var/lib/kubelet/plugins_registry not reset as customized by admin to system_u:object_r:container_file_t:s0
Feb 02 12:51:12 crc restorecon[4702]: Relabeled /var/usrlocal/bin/kubenswrapper from system_u:object_r:bin_t:s0 to system_u:object_r:kubelet_exec_t:s0
Feb 02 12:51:13 crc kubenswrapper[4703]: Flag --container-runtime-endpoint has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 12:51:13 crc kubenswrapper[4703]: Flag --minimum-container-ttl-duration has been deprecated, Use --eviction-hard or --eviction-soft instead. Will be removed in a future version.
Feb 02 12:51:13 crc kubenswrapper[4703]: Flag --volume-plugin-dir has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 12:51:13 crc kubenswrapper[4703]: Flag --register-with-taints has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
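[editor's note] The deprecation notices above (and --system-reserved just below) all point at the file passed via --config, which the FLAG dump later in this boot shows as /etc/kubernetes/kubelet.conf. A hedged sketch of those same settings expressed as a KubeletConfiguration payload, using the values logged here; the field names follow kubelet.config.k8s.io/v1beta1 as best I can tell and should be verified against the running kubelet's version:

    // kubelet_config_sketch.go - emits a KubeletConfiguration-shaped document
    // carrying the settings the deprecation notices say to move off the CLI.
    // Field names are assumed from kubelet.config.k8s.io/v1beta1; verify them.
    package main

    import (
        "encoding/json"
        "fmt"
    )

    type taint struct {
        Key    string `json:"key"`
        Effect string `json:"effect"`
    }

    type kubeletConfiguration struct {
        Kind                     string            `json:"kind"`
        APIVersion               string            `json:"apiVersion"`
        ContainerRuntimeEndpoint string            `json:"containerRuntimeEndpoint"`
        VolumePluginDir          string            `json:"volumePluginDir"`
        SystemReserved           map[string]string `json:"systemReserved"`
        RegisterWithTaints       []taint           `json:"registerWithTaints"`
    }

    func main() {
        cfg := kubeletConfiguration{
            Kind:       "KubeletConfiguration",
            APIVersion: "kubelet.config.k8s.io/v1beta1",
            // values taken from the deprecated flags logged in this boot
            ContainerRuntimeEndpoint: "/var/run/crio/crio.sock",
            VolumePluginDir:          "/etc/kubernetes/kubelet-plugins/volume/exec",
            SystemReserved: map[string]string{
                "cpu": "200m", "ephemeral-storage": "350Mi", "memory": "350Mi",
            },
            RegisterWithTaints: []taint{
                {Key: "node-role.kubernetes.io/master", Effect: "NoSchedule"},
            },
        }
        out, _ := json.MarshalIndent(cfg, "", "  ")
        fmt.Println(string(out)) // the kubelet accepts JSON as well as YAML here
    }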
Feb 02 12:51:13 crc kubenswrapper[4703]: Flag --pod-infra-container-image has been deprecated, will be removed in a future release. Image garbage collector will get sandbox image information from CRI.
Feb 02 12:51:13 crc kubenswrapper[4703]: Flag --system-reserved has been deprecated, This parameter should be set via the config file specified by the Kubelet's --config flag. See https://kubernetes.io/docs/tasks/administer-cluster/kubelet-config-file/ for more information.
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.627673 4703 server.go:211] "--pod-infra-container-image will not be pruned by the image garbage collector in kubelet and should also be set in the remote runtime"
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630194 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630211 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630216 4703 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630221 4703 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630230 4703 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630234 4703 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630237 4703 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630241 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630245 4703 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630249 4703 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630253 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630257 4703 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630260 4703 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630264 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630283 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630288 4703 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630293 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630297 4703 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630301 4703 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630305 4703 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630309 4703 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630312 4703 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630315 4703 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630319 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630323 4703 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630326 4703 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630329 4703 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630333 4703 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630336 4703 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630340 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630343 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630347 4703 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630350 4703 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630353 4703 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630357 4703 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630361 4703 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630364 4703 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630369 4703 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630374 4703 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630378 4703 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630382 4703 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630386 4703 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630392 4703 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630397 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630401 4703 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630404 4703 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630408 4703 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630411 4703 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630414 4703 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630418 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630421 4703 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630425 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630428 4703 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630432 4703 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630437 4703 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630441 4703 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630445 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630448 4703 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630452 4703 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630455 4703 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630459 4703 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630462 4703 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630465 4703 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630469 4703 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630472 4703 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630477 4703 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
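[editor's note] The feature_gate.go:330/351/353 warning stream above (it continues below) reflects how gates are applied: each arrives as a Name=bool pair, names this binary's registry does not know are warned about and skipped, and known-but-deprecated or GA gates warn when explicitly set. A stand-in sketch of that behavior in plain Go; the tiny "known" registry and the spec string are illustrative, not the real component-base gate list:

    // feature_gate_sketch.go - illustrative parsing of Name=bool gate pairs,
    // mimicking the warnings logged above. Not the component-base implementation.
    package main

    import (
        "fmt"
        "strconv"
        "strings"
    )

    func main() {
        known := map[string]bool{ // assumed defaults, for this sketch only
            "KMSv1":                 false,
            "CloudDualStackNodeIPs": false,
        }
        spec := "KMSv1=true,CloudDualStackNodeIPs=true,InsightsConfig=true"
        for _, kv := range strings.Split(spec, ",") {
            name, raw, ok := strings.Cut(kv, "=")
            if !ok {
                fmt.Printf("missing bool value for %s\n", name)
                continue
            }
            val, err := strconv.ParseBool(raw)
            if err != nil {
                fmt.Printf("invalid value of %s=%s\n", name, raw)
                continue
            }
            if _, found := known[name]; !found {
                // cf. the feature_gate.go:330 lines in this log
                fmt.Printf("unrecognized feature gate: %s\n", name)
                continue
            }
            known[name] = val
        }
        fmt.Printf("feature gates: %v\n", known) // cf. feature_gate.go:386 below
    }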
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630482 4703 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630486 4703 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630490 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630493 4703 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.630497 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631585 4703 flags.go:64] FLAG: --address="0.0.0.0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631597 4703 flags.go:64] FLAG: --allowed-unsafe-sysctls="[]"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631604 4703 flags.go:64] FLAG: --anonymous-auth="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631610 4703 flags.go:64] FLAG: --application-metrics-count-limit="100"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631618 4703 flags.go:64] FLAG: --authentication-token-webhook="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631623 4703 flags.go:64] FLAG: --authentication-token-webhook-cache-ttl="2m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631629 4703 flags.go:64] FLAG: --authorization-mode="AlwaysAllow"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631634 4703 flags.go:64] FLAG: --authorization-webhook-cache-authorized-ttl="5m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631638 4703 flags.go:64] FLAG: --authorization-webhook-cache-unauthorized-ttl="30s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631642 4703 flags.go:64] FLAG: --boot-id-file="/proc/sys/kernel/random/boot_id"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631647 4703 flags.go:64] FLAG: --bootstrap-kubeconfig="/etc/kubernetes/kubeconfig"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631652 4703 flags.go:64] FLAG: --cert-dir="/var/lib/kubelet/pki"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631656 4703 flags.go:64] FLAG: --cgroup-driver="cgroupfs"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631661 4703 flags.go:64] FLAG: --cgroup-root=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631665 4703 flags.go:64] FLAG: --cgroups-per-qos="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631670 4703 flags.go:64] FLAG: --client-ca-file=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631674 4703 flags.go:64] FLAG: --cloud-config=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631678 4703 flags.go:64] FLAG: --cloud-provider=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631681 4703 flags.go:64] FLAG: --cluster-dns="[]"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631688 4703 flags.go:64] FLAG: --cluster-domain=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631692 4703 flags.go:64] FLAG: --config="/etc/kubernetes/kubelet.conf"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631696 4703 flags.go:64] FLAG: --config-dir=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631700 4703 flags.go:64] FLAG: --container-hints="/etc/cadvisor/container_hints.json"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631704 4703 flags.go:64] FLAG: --container-log-max-files="5"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631710 4703 flags.go:64] FLAG: --container-log-max-size="10Mi"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631714 4703 flags.go:64] FLAG: --container-runtime-endpoint="/var/run/crio/crio.sock"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631718 4703 flags.go:64] FLAG: --containerd="/run/containerd/containerd.sock"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631722 4703 flags.go:64] FLAG: --containerd-namespace="k8s.io"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631727 4703 flags.go:64] FLAG: --contention-profiling="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631731 4703 flags.go:64] FLAG: --cpu-cfs-quota="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631738 4703 flags.go:64] FLAG: --cpu-cfs-quota-period="100ms"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631742 4703 flags.go:64] FLAG: --cpu-manager-policy="none"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631750 4703 flags.go:64] FLAG: --cpu-manager-policy-options=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631755 4703 flags.go:64] FLAG: --cpu-manager-reconcile-period="10s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631760 4703 flags.go:64] FLAG: --enable-controller-attach-detach="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631763 4703 flags.go:64] FLAG: --enable-debugging-handlers="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631768 4703 flags.go:64] FLAG: --enable-load-reader="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631772 4703 flags.go:64] FLAG: --enable-server="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631776 4703 flags.go:64] FLAG: --enforce-node-allocatable="[pods]"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631781 4703 flags.go:64] FLAG: --event-burst="100"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631786 4703 flags.go:64] FLAG: --event-qps="50"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631790 4703 flags.go:64] FLAG: --event-storage-age-limit="default=0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631794 4703 flags.go:64] FLAG: --event-storage-event-limit="default=0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631800 4703 flags.go:64] FLAG: --eviction-hard=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631805 4703 flags.go:64] FLAG: --eviction-max-pod-grace-period="0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631809 4703 flags.go:64] FLAG: --eviction-minimum-reclaim=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631813 4703 flags.go:64] FLAG: --eviction-pressure-transition-period="5m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631817 4703 flags.go:64] FLAG: --eviction-soft=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631821 4703 flags.go:64] FLAG: --eviction-soft-grace-period=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631825 4703 flags.go:64] FLAG: --exit-on-lock-contention="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631829 4703 flags.go:64] FLAG: --experimental-allocatable-ignore-eviction="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631833 4703 flags.go:64] FLAG: --experimental-mounter-path=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631837 4703 flags.go:64] FLAG: --fail-cgroupv1="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631841 4703 flags.go:64] FLAG: --fail-swap-on="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631845 4703 flags.go:64] FLAG: --feature-gates=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631850 4703 flags.go:64] FLAG: --file-check-frequency="20s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631854 4703 flags.go:64] FLAG: --global-housekeeping-interval="1m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631858 4703 flags.go:64] FLAG: --hairpin-mode="promiscuous-bridge"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631862 4703 flags.go:64] FLAG: --healthz-bind-address="127.0.0.1"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631866 4703 flags.go:64] FLAG: --healthz-port="10248"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631870 4703 flags.go:64] FLAG: --help="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631874 4703 flags.go:64] FLAG: --hostname-override=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631880 4703 flags.go:64] FLAG: --housekeeping-interval="10s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631884 4703 flags.go:64] FLAG: --http-check-frequency="20s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631888 4703 flags.go:64] FLAG: --image-credential-provider-bin-dir=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631892 4703 flags.go:64] FLAG: --image-credential-provider-config=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631896 4703 flags.go:64] FLAG: --image-gc-high-threshold="85"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631901 4703 flags.go:64] FLAG: --image-gc-low-threshold="80"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631905 4703 flags.go:64] FLAG: --image-service-endpoint=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631909 4703 flags.go:64] FLAG: --kernel-memcg-notification="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631913 4703 flags.go:64] FLAG: --kube-api-burst="100"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631917 4703 flags.go:64] FLAG: --kube-api-content-type="application/vnd.kubernetes.protobuf"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631921 4703 flags.go:64] FLAG: --kube-api-qps="50"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631925 4703 flags.go:64] FLAG: --kube-reserved=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631930 4703 flags.go:64] FLAG: --kube-reserved-cgroup=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631935 4703 flags.go:64] FLAG: --kubeconfig="/var/lib/kubelet/kubeconfig"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631939 4703 flags.go:64] FLAG: --kubelet-cgroups=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631943 4703 flags.go:64] FLAG: --local-storage-capacity-isolation="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631947 4703 flags.go:64] FLAG: --lock-file=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631951 4703 flags.go:64] FLAG: --log-cadvisor-usage="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631955 4703 flags.go:64] FLAG: --log-flush-frequency="5s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631959 4703 flags.go:64] FLAG: --log-json-info-buffer-size="0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631965 4703 flags.go:64] FLAG: --log-json-split-stream="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631969 4703 flags.go:64] FLAG: --log-text-info-buffer-size="0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631973 4703 flags.go:64] FLAG: --log-text-split-stream="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631977 4703 flags.go:64] FLAG: --logging-format="text"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631981 4703 flags.go:64] FLAG: --machine-id-file="/etc/machine-id,/var/lib/dbus/machine-id"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631985 4703 flags.go:64] FLAG: --make-iptables-util-chains="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631989 4703 flags.go:64] FLAG: --manifest-url=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631993 4703 flags.go:64] FLAG: --manifest-url-header=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.631998 4703 flags.go:64] FLAG: --max-housekeeping-interval="15s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632002 4703 flags.go:64] FLAG: --max-open-files="1000000"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632007 4703 flags.go:64] FLAG: --max-pods="110"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632011 4703 flags.go:64] FLAG: --maximum-dead-containers="-1"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632017 4703 flags.go:64] FLAG: --maximum-dead-containers-per-container="1"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632021 4703 flags.go:64] FLAG: --memory-manager-policy="None"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632025 4703 flags.go:64] FLAG: --minimum-container-ttl-duration="6m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632029 4703 flags.go:64] FLAG: --minimum-image-ttl-duration="2m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632033 4703 flags.go:64] FLAG: --node-ip="192.168.126.11"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632037 4703 flags.go:64] FLAG: --node-labels="node-role.kubernetes.io/control-plane=,node-role.kubernetes.io/master=,node.openshift.io/os_id=rhcos"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632046 4703 flags.go:64] FLAG: --node-status-max-images="50"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632050 4703 flags.go:64] FLAG: --node-status-update-frequency="10s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632054 4703 flags.go:64] FLAG: --oom-score-adj="-999"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632058 4703 flags.go:64] FLAG: --pod-cidr=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632062 4703 flags.go:64] FLAG: --pod-infra-container-image="quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:33549946e22a9ffa738fd94b1345f90921bc8f92fa6137784cb33c77ad806f9d"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632069 4703 flags.go:64] FLAG: --pod-manifest-path=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632072 4703 flags.go:64] FLAG: --pod-max-pids="-1"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632076 4703 flags.go:64] FLAG: --pods-per-core="0"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632083 4703 flags.go:64] FLAG: --port="10250"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632087 4703 flags.go:64] FLAG: --protect-kernel-defaults="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632091 4703 flags.go:64] FLAG: --provider-id=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632095 4703 flags.go:64] FLAG: --qos-reserved=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632099 4703 flags.go:64] FLAG: --read-only-port="10255"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632103 4703 flags.go:64] FLAG: --register-node="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632107 4703 flags.go:64] FLAG: --register-schedulable="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632111 4703 flags.go:64] FLAG: --register-with-taints="node-role.kubernetes.io/master=:NoSchedule"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632118 4703 flags.go:64] FLAG: --registry-burst="10"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632121 4703 flags.go:64] FLAG: --registry-qps="5"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632125 4703 flags.go:64] FLAG: --reserved-cpus=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632129 4703 flags.go:64] FLAG: --reserved-memory=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632134 4703 flags.go:64] FLAG: --resolv-conf="/etc/resolv.conf"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632138 4703 flags.go:64] FLAG: --root-dir="/var/lib/kubelet"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632143 4703 flags.go:64] FLAG: --rotate-certificates="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632147 4703 flags.go:64] FLAG: --rotate-server-certificates="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632151 4703 flags.go:64] FLAG: --runonce="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632155 4703 flags.go:64] FLAG: --runtime-cgroups="/system.slice/crio.service"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632176 4703 flags.go:64] FLAG: --runtime-request-timeout="2m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632180 4703 flags.go:64] FLAG: --seccomp-default="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632184 4703 flags.go:64] FLAG: --serialize-image-pulls="true"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632188 4703 flags.go:64] FLAG: --storage-driver-buffer-duration="1m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632192 4703 flags.go:64] FLAG: --storage-driver-db="cadvisor"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632196 4703 flags.go:64] FLAG: --storage-driver-host="localhost:8086"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632200 4703 flags.go:64] FLAG: --storage-driver-password="root"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632204 4703 flags.go:64] FLAG: --storage-driver-secure="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632208 4703 flags.go:64] FLAG: --storage-driver-table="stats"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632212 4703 flags.go:64] FLAG: --storage-driver-user="root"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632217 4703 flags.go:64] FLAG: --streaming-connection-idle-timeout="4h0m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632221 4703 flags.go:64] FLAG: --sync-frequency="1m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632225 4703 flags.go:64] FLAG: --system-cgroups=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632229 4703 flags.go:64] FLAG: --system-reserved="cpu=200m,ephemeral-storage=350Mi,memory=350Mi"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632237 4703 flags.go:64] FLAG: --system-reserved-cgroup=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632241 4703 flags.go:64] FLAG: --tls-cert-file=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632245 4703 flags.go:64] FLAG: --tls-cipher-suites="[]"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632250 4703 flags.go:64] FLAG: --tls-min-version=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632254 4703 flags.go:64] FLAG: --tls-private-key-file=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632258 4703 flags.go:64] FLAG: --topology-manager-policy="none"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632262 4703 flags.go:64] FLAG: --topology-manager-policy-options=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632266 4703 flags.go:64] FLAG: --topology-manager-scope="container"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632284 4703 flags.go:64] FLAG: --v="2"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632291 4703 flags.go:64] FLAG: --version="false"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632298 4703 flags.go:64] FLAG: --vmodule=""
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632304 4703 flags.go:64] FLAG: --volume-plugin-dir="/etc/kubernetes/kubelet-plugins/volume/exec"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.632308 4703 flags.go:64] FLAG: --volume-stats-agg-period="1m0s"
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632407 4703 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632413 4703 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632417 4703 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632421 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632424 4703 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632430 4703 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632433 4703 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632437 4703 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632440 4703 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632444 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632447 4703 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632451 4703 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632455 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632458 4703 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632462 4703 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632465 4703 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632470 4703 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632474 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632478 4703 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632485 4703 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632489 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632493 4703 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632497 4703 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632508 4703 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632512 4703 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632516 4703 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632520 4703 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632523 4703 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632527 4703 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632531 4703 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632534 4703 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632538 4703 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632541 4703 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632544 4703 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632548 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632552 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632555 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632561 4703 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632565 4703 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632568 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632572 4703 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632576 4703 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632579 4703 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632583 4703 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632586 4703 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632590 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.632593 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633734 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633741 4703 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633745 4703 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633749 4703 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633755 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633759 4703 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633763 4703 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633767 4703 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633771 4703 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633775 4703 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633778 4703 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633782 4703 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633791 4703 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633795 4703 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633798 4703 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633803 4703 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633807 4703 feature_gate.go:330] unrecognized feature gate: UpgradeStatus Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633812 4703 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633816 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImages Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633820 4703 feature_gate.go:330] unrecognized feature gate: NewOLM Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633824 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633828 4703 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633833 4703 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.633837 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.634725 4703 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]} Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.645660 4703 server.go:491] "Kubelet version" kubeletVersion="v1.31.5" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.645706 4703 server.go:493] "Golang settings" GOGC="" GOMAXPROCS="" GOTRACEBACK="" Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645792 4703 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645803 4703 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645809 4703 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645815 4703 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645821 4703 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645826 4703 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645831 4703 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645836 4703 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645841 4703 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645846 4703 feature_gate.go:330] unrecognized feature gate: HardwareSpeed Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645851 4703 feature_gate.go:330] unrecognized feature gate: Example Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645855 4703 feature_gate.go:330] unrecognized feature gate: ExternalOIDC Feb 02 12:51:13 crc 
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645859 4703 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645863 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645867 4703 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645871 4703 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645874 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645878 4703 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645881 4703 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645885 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645888 4703 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645892 4703 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645895 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645899 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645902 4703 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645906 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645909 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645913 4703 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645916 4703 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645920 4703 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645923 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645927 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645932 4703 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645937 4703 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645942 4703 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645946 4703 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645949 4703 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645953 4703 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645957 4703 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645960 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645964 4703 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645967 4703 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645971 4703 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645975 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645978 4703 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645982 4703 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645985 4703 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645990 4703 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645996 4703 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.645999 4703 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646004 4703 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646007 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646011 4703 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646015 4703 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646019 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646024 4703 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646029 4703 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646032 4703 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646038 4703 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646042 4703 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646046 4703 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646049 4703 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646053 4703 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646057 4703 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646060 4703 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646064 4703 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646068 4703 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646071 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646075 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646078 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646082 4703 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.646090 4703 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646195 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstall
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646202 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIOperatorDisableMachineHealthCheckController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646208 4703 feature_gate.go:330] unrecognized feature gate: PinnedImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646212 4703 feature_gate.go:330] unrecognized feature gate: ClusterMonitoringConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646216 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImagesAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646221 4703 feature_gate.go:353] Setting GA feature gate DisableKubeletCloudCredentialProviders=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646226 4703 feature_gate.go:330] unrecognized feature gate: AdminNetworkPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646229 4703 feature_gate.go:330] unrecognized feature gate: GCPClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646233 4703 feature_gate.go:330] unrecognized feature gate: ExternalOIDC
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646237 4703 feature_gate.go:330] unrecognized feature gate: PrivateHostedZoneAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646241 4703 feature_gate.go:330] unrecognized feature gate: SignatureStores
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646245 4703 feature_gate.go:330] unrecognized feature gate: GatewayAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646249 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiVCenters
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646253 4703 feature_gate.go:330] unrecognized feature gate: BareMetalLoadBalancer
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646257 4703 feature_gate.go:330] unrecognized feature gate: ClusterAPIInstallIBMCloud
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646261 4703 feature_gate.go:330] unrecognized feature gate: PersistentIPsForVirtualization
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646266 4703 feature_gate.go:330] unrecognized feature gate: EtcdBackendQuota
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646287 4703 feature_gate.go:330] unrecognized feature gate: AdditionalRoutingCapabilities
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646294 4703 feature_gate.go:330] unrecognized feature gate: SetEIPForNLBIngressController
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646303 4703 feature_gate.go:330] unrecognized feature gate: DNSNameResolver
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646307 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIProviderOpenStack
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646312 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAzure
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646316 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerLBSubnetsAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646322 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfigAPI
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646327 4703 feature_gate.go:330] unrecognized feature gate: AutomatedEtcdBackup
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646331 4703 feature_gate.go:330] unrecognized feature gate: MinimumKubeletVersion
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646335 4703 feature_gate.go:330] unrecognized feature gate: PlatformOperators
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646340 4703 feature_gate.go:330] unrecognized feature gate: ChunkSizeMiB
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646344 4703 feature_gate.go:330] unrecognized feature gate: AWSEFSDriverVolumeMetrics
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646347 4703 feature_gate.go:330] unrecognized feature gate: RouteAdvertisements
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646351 4703 feature_gate.go:330] unrecognized feature gate: VSphereDriverConfiguration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646354 4703 feature_gate.go:330] unrecognized feature gate: IngressControllerDynamicConfigurationManager
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646358 4703 feature_gate.go:330] unrecognized feature gate: ManagedBootImages
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646362 4703 feature_gate.go:330] unrecognized feature gate: OVNObservability
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646367 4703 feature_gate.go:330] unrecognized feature gate: BootcNodeManagement
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646371 4703 feature_gate.go:330] unrecognized feature gate: AzureWorkloadIdentity
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646374 4703 feature_gate.go:330] unrecognized feature gate: MachineConfigNodes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646378 4703 feature_gate.go:330] unrecognized feature gate: NutanixMultiSubnets
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646381 4703 feature_gate.go:330] unrecognized feature gate: NetworkLiveMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646385 4703 feature_gate.go:330] unrecognized feature gate: MachineAPIMigration
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646389 4703 feature_gate.go:330] unrecognized feature gate: ImageStreamImportMode
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646393 4703 feature_gate.go:330] unrecognized feature gate: BuildCSIVolumes
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646396 4703 feature_gate.go:330] unrecognized feature gate: GCPLabelsTags
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646399 4703 feature_gate.go:330] unrecognized feature gate: InsightsRuntimeExtractor
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646403 4703 feature_gate.go:330] unrecognized feature gate: NetworkDiagnosticsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646406 4703 feature_gate.go:330] unrecognized feature gate: SigstoreImageVerification
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646410 4703 feature_gate.go:330] unrecognized feature gate: HardwareSpeed
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646415 4703 feature_gate.go:351] Setting deprecated feature gate KMSv1=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646419 4703 feature_gate.go:330] unrecognized feature gate: UpgradeStatus
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646423 4703 feature_gate.go:330] unrecognized feature gate: NewOLM
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646427 4703 feature_gate.go:330] unrecognized feature gate: OpenShiftPodSecurityAdmission
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646430 4703 feature_gate.go:330] unrecognized feature gate: NodeDisruptionPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646434 4703 feature_gate.go:330] unrecognized feature gate: VSphereMultiNetworks
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646437 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallGCP
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646442 4703 feature_gate.go:353] Setting GA feature gate CloudDualStackNodeIPs=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646446 4703 feature_gate.go:330] unrecognized feature gate: MultiArchInstallAWS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646450 4703 feature_gate.go:330] unrecognized feature gate: InsightsConfig
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646454 4703 feature_gate.go:330] unrecognized feature gate: AWSClusterHostedDNS
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646458 4703 feature_gate.go:330] unrecognized feature gate: AlibabaPlatform
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646462 4703 feature_gate.go:330] unrecognized feature gate: NetworkSegmentation
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646465 4703 feature_gate.go:330] unrecognized feature gate: MetricsCollectionProfiles
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646469 4703 feature_gate.go:330] unrecognized feature gate: Example
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646472 4703 feature_gate.go:330] unrecognized feature gate: OnClusterBuild
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646476 4703 feature_gate.go:330] unrecognized feature gate: InsightsOnDemandDataGather
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646480 4703 feature_gate.go:330] unrecognized feature gate: VSphereControlPlaneMachineSet
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646483 4703 feature_gate.go:330] unrecognized feature gate: VolumeGroupSnapshot
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646487 4703 feature_gate.go:330] unrecognized feature gate: CSIDriverSharedResource
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646490 4703 feature_gate.go:330] unrecognized feature gate: ConsolePluginContentSecurityPolicy
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646494 4703 feature_gate.go:330] unrecognized feature gate: VSphereStaticIPs
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646498 4703 feature_gate.go:353] Setting GA feature gate ValidatingAdmissionPolicy=true. It will be removed in a future release.
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.646503 4703 feature_gate.go:330] unrecognized feature gate: MixedCPUsAllocation
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.646510 4703 feature_gate.go:386] feature gates: {map[CloudDualStackNodeIPs:true DisableKubeletCloudCredentialProviders:true DynamicResourceAllocation:false EventedPLEG:false KMSv1:true MaxUnavailableStatefulSet:false NodeSwap:false ProcMountType:false RouteExternalCertificate:false ServiceAccountTokenNodeBinding:false TranslateStreamCloseWebsocketRequests:false UserNamespacesPodSecurityStandards:false UserNamespacesSupport:false ValidatingAdmissionPolicy:true VolumeAttributesClass:false]}
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.648709 4703 server.go:940] "Client rotation is on, will bootstrap in background"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.653163 4703 bootstrap.go:85] "Current kubeconfig file contents are still valid, no bootstrap necessary"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.653351 4703 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-client-current.pem".
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.654898 4703 server.go:997] "Starting client certificate rotation"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.654975 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate rotation is enabled
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.656294 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2026-02-24 05:52:08 +0000 UTC, rotation deadline is 2026-01-13 21:23:55.894376409 +0000 UTC
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.656427 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.704934 4703 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.707495 4703 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.707537 4703 dynamic_cafile_content.go:161] "Starting controller" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.747351 4703 log.go:25] "Validated CRI v1 runtime API"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.784652 4703 log.go:25] "Validated CRI v1 image API"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.787375 4703 server.go:1437] "Using cgroup driver setting received from the CRI runtime" cgroupDriver="systemd"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.795412 4703 fs.go:133] Filesystem UUIDs: map[0b076daa-c26a-46d2-b3a6-72a8dbc6e257:/dev/vda4 2026-02-02-12-47-07-00:/dev/sr0 7B77-95E7:/dev/vda2 de0497b0-db1b-465a-b278-03db02455c71:/dev/vda3]
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.795490 4703 fs.go:134] Filesystem partitions: map[/dev/shm:{mountpoint:/dev/shm major:0 minor:22 fsType:tmpfs blockSize:0} /dev/vda3:{mountpoint:/boot major:252 minor:3 fsType:ext4 blockSize:0} /dev/vda4:{mountpoint:/var major:252 minor:4 fsType:xfs blockSize:0} /run:{mountpoint:/run major:0 minor:24 fsType:tmpfs blockSize:0} /run/user/1000:{mountpoint:/run/user/1000 major:0 minor:42 fsType:tmpfs blockSize:0} /tmp:{mountpoint:/tmp major:0 minor:30 fsType:tmpfs blockSize:0} /var/lib/etcd:{mountpoint:/var/lib/etcd major:0 minor:43 fsType:tmpfs blockSize:0}]
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.812460 4703 manager.go:217] Machine: {Timestamp:2026-02-02 12:51:13.809868597 +0000 UTC m=+0.825076151 CPUVendorID:AuthenticAMD NumCores:12 NumPhysicalCores:1 NumSockets:12 CpuFrequency:2800000 MemoryCapacity:33654136832 SwapCapacity:0 MemoryByType:map[] NVMInfo:{MemoryModeCapacity:0 AppDirectModeCapacity:0 AvgPowerBudget:0} HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] MachineID:21801e6708c44f15b81395eb736a7cec SystemUUID:50e23aaa-7ae0-4b56-bf68-da927f666ae9 BootID:1293eb9d-82ee-4ca6-9a67-93a06ad7a634 Filesystems:[{Device:/dev/vda4 DeviceMajor:252 DeviceMinor:4 Capacity:85292941312 Type:vfs Inodes:41679680 HasInodes:true} {Device:/tmp DeviceMajor:0 DeviceMinor:30 Capacity:16827068416 Type:vfs Inodes:1048576 HasInodes:true} {Device:/dev/vda3 DeviceMajor:252 DeviceMinor:3 Capacity:366869504 Type:vfs Inodes:98304 HasInodes:true} {Device:/run/user/1000 DeviceMajor:0 DeviceMinor:42 Capacity:3365412864 Type:vfs Inodes:821634 HasInodes:true} {Device:/var/lib/etcd DeviceMajor:0 DeviceMinor:43 Capacity:1073741824 Type:vfs Inodes:4108171 HasInodes:true} {Device:/dev/shm DeviceMajor:0 DeviceMinor:22 Capacity:16827068416 Type:vfs Inodes:4108171 HasInodes:true} {Device:/run DeviceMajor:0 DeviceMinor:24 Capacity:6730829824 Type:vfs Inodes:819200 HasInodes:true}] DiskMap:map[252:0:{Name:vda Major:252 Minor:0 Size:214748364800 Scheduler:none}] NetworkDevices:[{Name:br-ex MacAddress:fa:16:3e:c6:41:6d Speed:0 Mtu:1500} {Name:br-int MacAddress:d6:39:55:2e:22:71 Speed:0 Mtu:1400} {Name:ens3 MacAddress:fa:16:3e:c6:41:6d Speed:-1 Mtu:1500} {Name:ens7 MacAddress:fa:16:3e:68:a5:70 Speed:-1 Mtu:1500} {Name:ens7.20 MacAddress:52:54:00:1f:6c:6b Speed:-1 Mtu:1496} {Name:ens7.21 MacAddress:52:54:00:91:91:fd Speed:-1 Mtu:1496} {Name:ens7.22 MacAddress:52:54:00:6f:4b:6d Speed:-1 Mtu:1496} {Name:eth10 MacAddress:ce:ce:39:64:3c:66 Speed:0 Mtu:1500} {Name:ovn-k8s-mp0 MacAddress:0a:58:0a:d9:00:02 Speed:0 Mtu:1400} {Name:ovs-system MacAddress:92:83:59:06:67:05 Speed:0 Mtu:1500}] Topology:[{Id:0 Memory:33654136832 HugePages:[{PageSize:1048576 NumPages:0} {PageSize:2048 NumPages:0}] Cores:[{Id:0 Threads:[0] Caches:[{Id:0 Size:32768 Type:Data Level:1} {Id:0 Size:32768 Type:Instruction Level:1} {Id:0 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:0 Size:16777216 Type:Unified Level:3}] SocketID:0 BookID: DrawerID:} {Id:0 Threads:[1] Caches:[{Id:1 Size:32768 Type:Data Level:1} {Id:1 Size:32768 Type:Instruction Level:1} {Id:1 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:1 Size:16777216 Type:Unified Level:3}] SocketID:1 BookID: DrawerID:} {Id:0 Threads:[10] Caches:[{Id:10 Size:32768 Type:Data Level:1} {Id:10 Size:32768 Type:Instruction Level:1} {Id:10 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:10 Size:16777216 Type:Unified Level:3}] SocketID:10 BookID: DrawerID:} {Id:0 Threads:[11] Caches:[{Id:11 Size:32768 Type:Data Level:1} {Id:11 Size:32768 Type:Instruction Level:1} {Id:11 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:11 Size:16777216 Type:Unified Level:3}] SocketID:11 BookID: DrawerID:} {Id:0 Threads:[2] Caches:[{Id:2 Size:32768 Type:Data Level:1} {Id:2 Size:32768 Type:Instruction Level:1} {Id:2 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:2 Size:16777216 Type:Unified Level:3}] SocketID:2 BookID: DrawerID:} {Id:0 Threads:[3] Caches:[{Id:3 Size:32768 Type:Data Level:1} {Id:3 Size:32768 Type:Instruction Level:1} {Id:3 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:3 Size:16777216 Type:Unified Level:3}] SocketID:3 BookID: DrawerID:} {Id:0 Threads:[4] Caches:[{Id:4 Size:32768 Type:Data Level:1} {Id:4 Size:32768 Type:Instruction Level:1} {Id:4 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:4 Size:16777216 Type:Unified Level:3}] SocketID:4 BookID: DrawerID:} {Id:0 Threads:[5] Caches:[{Id:5 Size:32768 Type:Data Level:1} {Id:5 Size:32768 Type:Instruction Level:1} {Id:5 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:5 Size:16777216 Type:Unified Level:3}] SocketID:5 BookID: DrawerID:} {Id:0 Threads:[6] Caches:[{Id:6 Size:32768 Type:Data Level:1} {Id:6 Size:32768 Type:Instruction Level:1} {Id:6 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:6 Size:16777216 Type:Unified Level:3}] SocketID:6 BookID: DrawerID:} {Id:0 Threads:[7] Caches:[{Id:7 Size:32768 Type:Data Level:1} {Id:7 Size:32768 Type:Instruction Level:1} {Id:7 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:7 Size:16777216 Type:Unified Level:3}] SocketID:7 BookID: DrawerID:} {Id:0 Threads:[8] Caches:[{Id:8 Size:32768 Type:Data Level:1} {Id:8 Size:32768 Type:Instruction Level:1} {Id:8 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:8 Size:16777216 Type:Unified Level:3}] SocketID:8 BookID: DrawerID:} {Id:0 Threads:[9] Caches:[{Id:9 Size:32768 Type:Data Level:1} {Id:9 Size:32768 Type:Instruction Level:1} {Id:9 Size:524288 Type:Unified Level:2}] UncoreCaches:[{Id:9 Size:16777216 Type:Unified Level:3}] SocketID:9 BookID: DrawerID:}] Caches:[] Distances:[10]}] CloudProvider:Unknown InstanceType:Unknown InstanceID:None}
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.812717 4703 manager_no_libpfm.go:29] cAdvisor is build without cgo and/or libpfm support. Perf event counters are not available.
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.812907 4703 manager.go:233] Version: {KernelVersion:5.14.0-427.50.2.el9_4.x86_64 ContainerOsVersion:Red Hat Enterprise Linux CoreOS 418.94.202502100215-0 DockerVersion: DockerAPIVersion: CadvisorVersion: CadvisorRevision:}
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.813372 4703 swap_util.go:113] "Swap is on" /proc/swaps contents="Filename\t\t\t\tType\t\tSize\t\tUsed\t\tPriority"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.813566 4703 container_manager_linux.go:267] "Container manager verified user specified cgroup-root exists" cgroupRoot=[]
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.813609 4703 container_manager_linux.go:272] "Creating Container Manager object based on Node Config" nodeConfig={"NodeName":"crc","RuntimeCgroupsName":"/system.slice/crio.service","SystemCgroupsName":"/system.slice","KubeletCgroupsName":"","KubeletOOMScoreAdj":-999,"ContainerRuntime":"","CgroupsPerQOS":true,"CgroupRoot":"/","CgroupDriver":"systemd","KubeletRootDir":"/var/lib/kubelet","ProtectKernelDefaults":true,"KubeReservedCgroupName":"","SystemReservedCgroupName":"","ReservedSystemCPUs":{},"EnforceNodeAllocatable":{"pods":{}},"KubeReserved":null,"SystemReserved":{"cpu":"200m","ephemeral-storage":"350Mi","memory":"350Mi"},"HardEvictionThresholds":[{"Signal":"memory.available","Operator":"LessThan","Value":{"Quantity":"100Mi","Percentage":0},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.1},"GracePeriod":0,"MinReclaim":null},{"Signal":"nodefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.available","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.15},"GracePeriod":0,"MinReclaim":null},{"Signal":"imagefs.inodesFree","Operator":"LessThan","Value":{"Quantity":null,"Percentage":0.05},"GracePeriod":0,"MinReclaim":null}],"QOSReserved":{},"CPUManagerPolicy":"none","CPUManagerPolicyOptions":null,"TopologyManagerScope":"container","CPUManagerReconcilePeriod":10000000000,"ExperimentalMemoryManagerPolicy":"None","ExperimentalMemoryManagerReservedMemory":null,"PodPidsLimit":4096,"EnforceCPULimits":true,"CPUCFSQuotaPeriod":100000000,"TopologyManagerPolicy":"none","TopologyManagerPolicyOptions":null,"CgroupVersion":2}
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.821070 4703 topology_manager.go:138] "Creating topology manager with none policy"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.821118 4703 container_manager_linux.go:303] "Creating device plugin manager"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.821928 4703 manager.go:142] "Creating Device Plugin manager" path="/var/lib/kubelet/device-plugins/kubelet.sock"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.821983 4703 server.go:66] "Creating device plugin registration server" version="v1beta1" socket="/var/lib/kubelet/device-plugins/kubelet.sock"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.822348 4703 state_mem.go:36] "Initialized new in-memory state store"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.822562 4703 server.go:1245] "Using root directory" path="/var/lib/kubelet"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.828078 4703 kubelet.go:418] "Attempting to sync node with API server"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.828121 4703 kubelet.go:313] "Adding static pod path" path="/etc/kubernetes/manifests"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.828153 4703 file.go:69] "Watching path" path="/etc/kubernetes/manifests"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.828178 4703 kubelet.go:324] "Adding apiserver pod source"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.828216 4703 apiserver.go:42] "Waiting for node sync before watching apiserver pods"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.833186 4703 kuberuntime_manager.go:262] "Container runtime initialized" containerRuntime="cri-o" version="1.31.5-4.rhaos4.18.gitdad78d5.el9" apiVersion="v1"
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.833731 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.833803 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.833944 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused
Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.834081 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.834419 4703 certificate_store.go:130] Loading cert/key pair from "/var/lib/kubelet/pki/kubelet-server-current.pem".
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.837422 4703 kubelet.go:854] "Not starting ClusterTrustBundle informer because we are in static kubelet mode"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839806 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/portworx-volume"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839854 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/empty-dir"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839870 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/git-repo"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839886 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/host-path"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839909 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/nfs"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839925 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/secret"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839942 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/iscsi"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839965 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/downward-api"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.839982 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/fc"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.840001 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/configmap"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.840021 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/projected"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.840035 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/local-volume"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.841180 4703 plugins.go:603] "Loaded volume plugin" pluginName="kubernetes.io/csi"
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.842152 4703 server.go:1280] "Started kubelet"
Feb 02 12:51:13 crc systemd[1]: Started Kubernetes Kubelet.
Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.845896 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.845847 4703 ratelimit.go:55] "Setting rate limiting for endpoint" service="podresources" qps=100 burstTokens=10 Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.845890 4703 server.go:163] "Starting to listen" address="0.0.0.0" port=10250 Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.846883 4703 server.go:236] "Starting to serve the podresources API" endpoint="unix:/var/lib/kubelet/pod-resources/kubelet.sock" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.846984 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate rotation is enabled Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.847022 4703 fs_resource_analyzer.go:67] "Starting FS ResourceAnalyzer" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.847219 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 18:04:32.441267347 +0000 UTC Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.847396 4703 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.847844 4703 volume_manager.go:287] "The desired_state_of_world populator starts" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.847915 4703 volume_manager.go:289] "Starting Kubelet Volume Manager" Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.847989 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="200ms" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.848029 4703 desired_state_of_world_populator.go:146] "Desired state populator starts to run" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.848401 4703 factory.go:219] Registration of the containerd container factory failed: unable to create containerd client: containerd: cannot unix dial containerd api service: dial unix /run/containerd/containerd.sock: connect: no such file or directory Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.848450 4703 factory.go:55] Registering systemd factory Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.848469 4703 factory.go:221] Registration of the systemd container factory successfully Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.848934 4703 factory.go:153] Registering CRI-O factory Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.848975 4703 factory.go:221] Registration of the crio container factory successfully Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.849016 4703 factory.go:103] Registering Raw factory Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.849041 4703 manager.go:1196] Started watching for new ooms in manager Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.849935 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 
38.102.83.198:6443: connect: connection refused Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.850033 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.850244 4703 manager.go:319] Starting recovery of all containers Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.851246 4703 server.go:460] "Adding debug handlers to kubelet server" Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.873523 4703 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18906efa0fc58b0c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 12:51:13.842088716 +0000 UTC m=+0.857296290,LastTimestamp:2026-02-02 12:51:13.842088716 +0000 UTC m=+0.857296290,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.886952 4703 manager.go:324] Recovery completed Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887747 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887807 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887825 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887841 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887859 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887874 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" 
seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887888 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887900 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887917 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887932 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887945 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887959 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887973 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.887987 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888022 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888036 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888055 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" seLinuxMountContext="" Feb 02 12:51:13 crc 
kubenswrapper[4703]: I0202 12:51:13.888069 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888082 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888096 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888108 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888123 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888138 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888160 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888173 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888186 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888203 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888218 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 
12:51:13.888252 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5b88f790-22fa-440e-b583-365168c0b23d" volumeName="kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888266 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888349 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888366 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888378 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888390 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888405 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888419 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888433 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888448 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888460 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888473 4703 reconstruct.go:130] "Volume is 
marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888490 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888505 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="44663579-783b-4372-86d6-acf235a62d72" volumeName="kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888519 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888532 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d751cbb-f2e2-430d-9754-c882a5e924a5" volumeName="kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888546 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888560 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888572 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888587 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888603 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888619 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888643 4703 reconstruct.go:130] "Volume is marked as 
uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888655 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888715 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888736 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888753 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888767 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1386a44e-36a2-460c-96d0-0359d2b6f0f5" volumeName="kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888782 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888797 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888810 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888826 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888840 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888853 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888865 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888880 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888895 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888909 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888924 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888937 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888953 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888966 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888980 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.888990 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889001 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889011 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889020 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889028 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889037 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" volumeName="kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889046 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889056 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889067 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889082 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889092 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889103 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889113 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889123 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889132 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889143 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889154 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889165 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889177 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889186 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889195 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" volumeName="kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889204 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889212 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="d75a4c96-2883-4a0b-bab2-0fab2b6c0b49" volumeName="kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889220 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889230 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889240 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889250 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889260 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="87cf06ed-a83f-41a7-828d-70653580a8cb" volumeName="kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889308 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889322 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889337 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889351 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889364 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889383 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889397 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="b6312bbd-5731-4ea0-a20f-81d5a57df44a" volumeName="kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889409 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889422 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889435 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889447 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889458 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889469 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="4bb40260-dbaa-4fb0-84df-5e680505d512" volumeName="kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889662 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" volumeName="kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889676 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889691 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889703 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889721 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" 
podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889734 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="efdd0498-1daa-4136-9a4a-3b948c2293fc" volumeName="kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889747 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b78653f-4ff9-4508-8672-245ed9b561e3" volumeName="kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889761 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889773 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889786 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889802 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889816 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889828 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889840 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889888 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889904 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" 
volumeName="kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889919 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889932 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889943 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889954 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889965 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889974 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889983 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5441d097-087c-4d9a-baa8-b210afa90fc9" volumeName="kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.889995 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6402fda4-df10-493c-b4e5-d0569419652d" volumeName="kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890006 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890016 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890030 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" 
volumeName="kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890042 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890054 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6731426b-95fe-49ff-bb5f-40441049fde2" volumeName="kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890068 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890080 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" volumeName="kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890091 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890104 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="fda69060-fa79-4696-b1a6-7980f124bf7c" volumeName="kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890120 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="22c825df-677d-4ca6-82db-3454ed06e783" volumeName="kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890135 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890147 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890163 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890175 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" 
volumeName="kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890187 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890201 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" volumeName="kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890214 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bf126b07-da06-4140-9a57-dfd54fc6b486" volumeName="kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890226 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890240 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09efc573-dbb6-4249-bd59-9b87aba8dd28" volumeName="kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890250 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6ea678ab-3438-413e-bfe3-290ae7725660" volumeName="kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890262 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890294 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7539238d-5fe0-46ed-884e-1c3b566537ec" volumeName="kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890306 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" volumeName="kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890320 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" volumeName="kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890331 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" 
volumeName="kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890342 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="01ab3dd5-8196-46d0-ad33-122e2ca51def" volumeName="kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890352 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890362 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890371 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890380 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="496e6271-fb68-4057-954e-a0d97a4afa3f" volumeName="kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890389 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" volumeName="kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890399 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="57a731c4-ef35-47a8-b875-bfb08a7f8011" volumeName="kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890410 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7bb08738-c794-4ee8-9972-3a62ca171029" volumeName="kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890419 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890429 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="a31745f5-9847-4afe-82a5-3161cc66ca93" volumeName="kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890437 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" 
volumeName="kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890445 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" volumeName="kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890454 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="0b574797-001e-440a-8f4e-c0be86edad0f" volumeName="kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890463 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890472 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="25e176fe-21b4-4974-b1ed-c8b94f112a7f" volumeName="kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890482 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" volumeName="kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890492 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="37a5e44f-9a88-4405-be8a-b645485e7312" volumeName="kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890503 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="43509403-f426-496e-be36-56cef71462f5" volumeName="kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890512 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1bf7eb37-55a3-4c65-b768-a94c82151e69" volumeName="kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890521 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="20b0d48f-5fd6-431c-a545-e3c800c7b866" volumeName="kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890530 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3ab1a177-2de0-46d9-b765-d0d0649bb42e" volumeName="kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890538 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="925f1c65-6136-48ba-85aa-3a3b50560753" 
volumeName="kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890545 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="9d4552c7-cd75-42dd-8880-30dd377c49a4" volumeName="kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890552 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" volumeName="kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890560 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="e7e6199b-1264-4501-8953-767f51328d08" volumeName="kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890569 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" volumeName="kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890579 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890587 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5fe579f8-e8a6-4643-bce5-a661393c4dde" volumeName="kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890596 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="6509e943-70c6-444c-bc41-48a544e36fbd" volumeName="kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890604 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890613 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890624 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" volumeName="kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890632 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="31d8b7a1-420e-4252-a5b7-eebe8a111292" 
volumeName="kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890642 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3b6479f0-333b-4a96-9adf-2099afdc2447" volumeName="kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890650 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" volumeName="kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890658 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="49ef4625-1d3a-4a9f-b595-c2433d32326d" volumeName="kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890667 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="5225d0e4-402f-4861-b410-819f433b1803" volumeName="kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890676 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="7583ce53-e0fe-4a16-9e4d-50516596a136" volumeName="kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.890684 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.892930 4703 reconstruct.go:144] "Volume is marked device as uncertain and added into the actual state" volumeName="kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" deviceMountPath="/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.892956 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" volumeName="kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.892967 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="ef543e1b-8068-4ea3-b32a-61027b32e95d" volumeName="kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.892978 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="1d611f23-29be-4491-8495-bee1670e935f" volumeName="kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.892989 4703 reconstruct.go:130] 
"Volume is marked as uncertain and added into the actual state" pod="" podName="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" volumeName="kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.892998 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="c03ee662-fb2f-4fc4-a2c1-af487c19d254" volumeName="kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.893008 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" volumeName="kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.893016 4703 reconstruct.go:130] "Volume is marked as uncertain and added into the actual state" pod="" podName="8f668bae-612b-4b75-9490-919e737c6a3b" volumeName="kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" seLinuxMountContext="" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.893032 4703 reconstruct.go:97] "Volume reconstruction finished" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.893038 4703 reconciler.go:26] "Reconciler: start to sync state" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.899356 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.901582 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.901693 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.901763 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.902439 4703 cpu_manager.go:225] "Starting CPU manager" policy="none" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.902457 4703 cpu_manager.go:226] "Reconciling" reconcilePeriod="10s" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.902474 4703 state_mem.go:36] "Initialized new in-memory state store" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.930248 4703 kubelet_network_linux.go:50] "Initialized iptables rules." protocol="IPv4" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.932391 4703 kubelet_network_linux.go:50] "Initialized iptables rules." 
protocol="IPv6" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.932494 4703 status_manager.go:217] "Starting to sync pod status with apiserver" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.932534 4703 kubelet.go:2335] "Starting kubelet main sync loop" Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.932582 4703 kubelet.go:2359] "Skipping pod synchronization" err="[container runtime status check may not have completed yet, PLEG is not healthy: pleg has yet to be successful]" Feb 02 12:51:13 crc kubenswrapper[4703]: W0202 12:51:13.933499 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.933658 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:13 crc kubenswrapper[4703]: E0202 12:51:13.948561 4703 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.961375 4703 policy_none.go:49] "None policy: Start" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.962529 4703 memory_manager.go:170] "Starting memorymanager" policy="None" Feb 02 12:51:13 crc kubenswrapper[4703]: I0202 12:51:13.962589 4703 state_mem.go:35] "Initializing new in-memory state store" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.014213 4703 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/default/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{crc.18906efa0fc58b0c default 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Node,Namespace:,Name:crc,UID:crc,APIVersion:,ResourceVersion:,FieldPath:,},Reason:Starting,Message:Starting kubelet.,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 12:51:13.842088716 +0000 UTC m=+0.857296290,LastTimestamp:2026-02-02 12:51:13.842088716 +0000 UTC m=+0.857296290,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.032936 4703 kubelet.go:2359] "Skipping pod synchronization" err="container runtime status check may not have completed yet" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.049386 4703 kubelet_node_status.go:503] "Error getting the current node from lister" err="node \"crc\" not found" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.049633 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="400ms" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.147527 4703 manager.go:334] "Starting Device Plugin manager" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.147608 4703 manager.go:513] "Failed to read data from checkpoint" 
checkpoint="kubelet_internal_checkpoint" err="checkpoint is not found" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.147622 4703 server.go:79] "Starting device plugin registration server" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.148143 4703 eviction_manager.go:189] "Eviction manager: starting control loop" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.148159 4703 container_log_manager.go:189] "Initializing container log rotate workers" workers=1 monitorPeriod="10s" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.148429 4703 plugin_watcher.go:51] "Plugin Watcher Start" path="/var/lib/kubelet/plugins_registry" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.148544 4703 plugin_manager.go:116] "The desired_state_of_world populator (plugin watcher) starts" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.148552 4703 plugin_manager.go:118] "Starting Kubelet Plugin Manager" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.156363 4703 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.234001 4703 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc","openshift-machine-config-operator/kube-rbac-proxy-crio-crc","openshift-etcd/etcd-crc","openshift-kube-apiserver/kube-apiserver-crc","openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.234232 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.235789 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.235839 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.235855 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236032 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236361 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236422 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236878 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236889 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237062 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237208 4703 util.go:30] "No sandbox for pod can be found. 
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.234232 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.235789 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.235839 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.235855 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236032 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236361 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236422 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236878 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.236889 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237062 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237208 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237255 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237705 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237730 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.237910 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238094 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238135 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238318 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238341 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238367 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238393 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238406 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238519 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238537 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238545 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238657 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238809 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.238852 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239510 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239525 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239532 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239537 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239549 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239851 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.239888 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.241073 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.241097 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.241109 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.241096 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.241154 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.241169 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.249146 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.250110 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.250140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.250152 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.250179 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.250775 4703 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc"
12:51:14.250775 4703 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298646 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298708 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298731 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298757 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298815 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298897 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298942 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298969 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.298995 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " 
pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.299080 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.299140 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.299185 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.299219 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.299256 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.299318 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401180 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401246 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401303 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 
12:51:14.401331 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401384 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401413 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401466 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401443 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-data-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401513 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-resource-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401464 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"static-pod-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-static-pod-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401495 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401548 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401561 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"kube-apiserver-crc\" (UID: 
\"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401520 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401494 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-var-lib-kubelet\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401464 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kube\" (UniqueName: \"kubernetes.io/host-path/d1b160f5dda77d281dd8e69ec8d817f9-etc-kube\") pod \"kube-rbac-proxy-crio-crc\" (UID: \"d1b160f5dda77d281dd8e69ec8d817f9\") " pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401600 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401643 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-resource-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401698 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401733 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-resource-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401739 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401762 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-cert-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401818 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"usr-local-bin\" (UniqueName: 
\"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401839 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401875 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401880 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"usr-local-bin\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-usr-local-bin\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401894 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401918 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-dir\" (UniqueName: \"kubernetes.io/host-path/2139d3e2895fc6797b9c76a1b4c9886d-log-dir\") pod \"etcd-crc\" (UID: \"2139d3e2895fc6797b9c76a1b4c9886d\") " pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.401962 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f614b9022728cf315e60c057852e563e-cert-dir\") pod \"kube-controller-manager-crc\" (UID: \"f614b9022728cf315e60c057852e563e\") " pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.402090 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/3dcd261975c3d6b9a6ad6367fd4facd3-cert-dir\") pod \"openshift-kube-scheduler-crc\" (UID: \"3dcd261975c3d6b9a6ad6367fd4facd3\") " pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.450830 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="800ms" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.450904 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.453016 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.453062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:14 
crc kubenswrapper[4703]: I0202 12:51:14.453074 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.453105 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.453585 4703 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.573815 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.581163 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.602201 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd/etcd-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.613735 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.620752 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.641612 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd1b160f5dda77d281dd8e69ec8d817f9.slice/crio-384bc428c3e6d4b4d5dc34e7d9365fba681a4834f84007189623a37bf3b7f0bb WatchSource:0}: Error finding container 384bc428c3e6d4b4d5dc34e7d9365fba681a4834f84007189623a37bf3b7f0bb: Status 404 returned error can't find the container with id 384bc428c3e6d4b4d5dc34e7d9365fba681a4834f84007189623a37bf3b7f0bb Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.646534 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3dcd261975c3d6b9a6ad6367fd4facd3.slice/crio-d781fddef7ea800a2c90cc7fc057133c104e5654bf43b93d9ccfbbd8acc45765 WatchSource:0}: Error finding container d781fddef7ea800a2c90cc7fc057133c104e5654bf43b93d9ccfbbd8acc45765: Status 404 returned error can't find the container with id d781fddef7ea800a2c90cc7fc057133c104e5654bf43b93d9ccfbbd8acc45765 Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.649498 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod2139d3e2895fc6797b9c76a1b4c9886d.slice/crio-44ef8d6c526f574db93f520c9992aa1e914e5610f156c876739e6075153a3445 WatchSource:0}: Error finding container 44ef8d6c526f574db93f520c9992aa1e914e5610f156c876739e6075153a3445: Status 404 returned error can't find the container with id 44ef8d6c526f574db93f520c9992aa1e914e5610f156c876739e6075153a3445 Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.653913 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf614b9022728cf315e60c057852e563e.slice/crio-61acabe2abfb36345339e02c19c4e03ffa764b66c79c9f6545d162190b2a9612 WatchSource:0}: Error finding container 61acabe2abfb36345339e02c19c4e03ffa764b66c79c9f6545d162190b2a9612: Status 404 
returned error can't find the container with id 61acabe2abfb36345339e02c19c4e03ffa764b66c79c9f6545d162190b2a9612 Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.656074 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.656198 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.661952 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf4b27818a5e8e43d0dc095d08835c792.slice/crio-31218c2456c8cb77fbc527dfeb038858e100ca4a4c5203599df5212347f4eaaf WatchSource:0}: Error finding container 31218c2456c8cb77fbc527dfeb038858e100ca4a4c5203599df5212347f4eaaf: Status 404 returned error can't find the container with id 31218c2456c8cb77fbc527dfeb038858e100ca4a4c5203599df5212347f4eaaf Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.775972 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.776058 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.847085 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.848057 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-06 00:53:34.340142251 +0000 UTC Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.854722 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.855952 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.855982 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.855991 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.856016 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.856450 4703 
kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Feb 02 12:51:14 crc kubenswrapper[4703]: W0202 12:51:14.857071 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:14 crc kubenswrapper[4703]: E0202 12:51:14.857138 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.938625 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"61acabe2abfb36345339e02c19c4e03ffa764b66c79c9f6545d162190b2a9612"} Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.939876 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"44ef8d6c526f574db93f520c9992aa1e914e5610f156c876739e6075153a3445"} Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.940843 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"d781fddef7ea800a2c90cc7fc057133c104e5654bf43b93d9ccfbbd8acc45765"} Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.942073 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"384bc428c3e6d4b4d5dc34e7d9365fba681a4834f84007189623a37bf3b7f0bb"} Feb 02 12:51:14 crc kubenswrapper[4703]: I0202 12:51:14.943047 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"31218c2456c8cb77fbc527dfeb038858e100ca4a4c5203599df5212347f4eaaf"} Feb 02 12:51:15 crc kubenswrapper[4703]: W0202 12:51:15.115016 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:15 crc kubenswrapper[4703]: E0202 12:51:15.115160 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:15 crc kubenswrapper[4703]: E0202 12:51:15.252982 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: 
connection refused" interval="1.6s" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.657056 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.658848 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.658897 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.658909 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.658947 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 12:51:15 crc kubenswrapper[4703]: E0202 12:51:15.659624 4703 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.846972 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.848323 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-09 11:54:11.654759299 +0000 UTC Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.878600 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 12:51:15 crc kubenswrapper[4703]: E0202 12:51:15.881696 4703 certificate_manager.go:562] "Unhandled Error" err="kubernetes.io/kube-apiserver-client-kubelet: Failed while requesting a signed certificate from the control plane: cannot create certificate signing request: Post \"https://api-int.crc.testing:6443/apis/certificates.k8s.io/v1/certificatesigningrequests\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.947641 4703 generic.go:334] "Generic (PLEG): container finished" podID="d1b160f5dda77d281dd8e69ec8d817f9" containerID="99f3e81f4a266e5d2fed5b2cd89465cfb376b62d8699750a0593225130b24c41" exitCode=0 Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.947705 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerDied","Data":"99f3e81f4a266e5d2fed5b2cd89465cfb376b62d8699750a0593225130b24c41"} Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.947761 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.949252 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.949361 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612" exitCode=0 Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.949370 4703 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.949444 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612"} Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.949467 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.949509 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.950588 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.950610 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.950618 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.950863 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604"} Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.952772 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.952793 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"8c37335c5a7e3161e13c7faaf991ea0e954f790d5dadd2a054c565bb1cc2ddbf"} Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.952761 4703 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="8c37335c5a7e3161e13c7faaf991ea0e954f790d5dadd2a054c565bb1cc2ddbf" exitCode=0 Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.952872 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.953312 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.953329 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.953338 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.953694 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.953717 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.953732 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.954412 4703 
generic.go:334] "Generic (PLEG): container finished" podID="3dcd261975c3d6b9a6ad6367fd4facd3" containerID="a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361" exitCode=0 Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.954444 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerDied","Data":"a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361"} Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.954519 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.955577 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.955599 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:15 crc kubenswrapper[4703]: I0202 12:51:15.955609 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.847030 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.849113 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 19:23:46.069041223 +0000 UTC Feb 02 12:51:16 crc kubenswrapper[4703]: E0202 12:51:16.853716 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="3.2s" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.959237 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.959305 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.959322 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.960562 4703 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="1e7c83c32acd3792c8e0ba3694aef53d312b50f075ed59918bcc3019e566f4b5" exitCode=0 Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.960608 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"1e7c83c32acd3792c8e0ba3694aef53d312b50f075ed59918bcc3019e566f4b5"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.960712 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.961594 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.961644 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.961658 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965109 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965352 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965378 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965395 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" event={"ID":"3dcd261975c3d6b9a6ad6367fd4facd3","Type":"ContainerStarted","Data":"84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965791 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965818 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.965830 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.967079 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" event={"ID":"d1b160f5dda77d281dd8e69ec8d817f9","Type":"ContainerStarted","Data":"744b52f4307af5c50b6c669d4aad3485bb54ccceba58eb35891c027190b8ade1"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.967111 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.967755 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.967778 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.967786 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 
12:51:16.968787 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c"} Feb 02 12:51:16 crc kubenswrapper[4703]: I0202 12:51:16.968816 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b"} Feb 02 12:51:17 crc kubenswrapper[4703]: W0202 12:51:17.005603 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.RuntimeClass: Get "https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:17 crc kubenswrapper[4703]: E0202 12:51:17.005684 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.RuntimeClass: failed to list *v1.RuntimeClass: Get \"https://api-int.crc.testing:6443/apis/node.k8s.io/v1/runtimeclasses?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.260211 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.261413 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.261473 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.261485 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.261517 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 12:51:17 crc kubenswrapper[4703]: W0202 12:51:17.261638 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.CSIDriver: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:17 crc kubenswrapper[4703]: E0202 12:51:17.261693 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.CSIDriver: failed to list *v1.CSIDriver: Get \"https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csidrivers?limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:17 crc kubenswrapper[4703]: E0202 12:51:17.262125 4703 kubelet_node_status.go:99] "Unable to register node with API server" err="Post \"https://api-int.crc.testing:6443/api/v1/nodes\": dial tcp 38.102.83.198:6443: connect: connection refused" node="crc" Feb 02 12:51:17 crc kubenswrapper[4703]: W0202 12:51:17.814761 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Service: Get "https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:17 crc kubenswrapper[4703]: E0202 12:51:17.814845 4703 reflector.go:158] "Unhandled Error" 
err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Service: failed to list *v1.Service: Get \"https://api-int.crc.testing:6443/api/v1/services?fieldSelector=spec.clusterIP%21%3DNone&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.847128 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.849288 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 01:08:26.446761404 +0000 UTC Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.974969 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592"} Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.975010 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495"} Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.975021 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d"} Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.975041 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.975851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.975892 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.975907 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.977486 4703 generic.go:334] "Generic (PLEG): container finished" podID="2139d3e2895fc6797b9c76a1b4c9886d" containerID="c998bcb529632b8579f505d585c0fecc15b89bb42ba32ca3e2076447229abc98" exitCode=0 Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.977569 4703 prober_manager.go:312] "Failed to trigger a manual run" probe="Readiness" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.977601 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.977625 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978138 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978135 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" 
event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerDied","Data":"c998bcb529632b8579f505d585c0fecc15b89bb42ba32ca3e2076447229abc98"} Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978335 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978475 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978512 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978525 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978534 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978608 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978622 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978813 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978836 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.978845 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.982360 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.982397 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:17 crc kubenswrapper[4703]: I0202 12:51:17.982410 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:18 crc kubenswrapper[4703]: W0202 12:51:18.317841 4703 reflector.go:561] k8s.io/client-go/informers/factory.go:160: failed to list *v1.Node: Get "https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:18 crc kubenswrapper[4703]: E0202 12:51:18.317934 4703 reflector.go:158] "Unhandled Error" err="k8s.io/client-go/informers/factory.go:160: Failed to watch *v1.Node: failed to list *v1.Node: Get \"https://api-int.crc.testing:6443/api/v1/nodes?fieldSelector=metadata.name%3Dcrc&limit=500&resourceVersion=0\": dial tcp 38.102.83.198:6443: connect: connection refused" logger="UnhandledError" Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.847549 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": dial tcp 38.102.83.198:6443: connect: connection refused Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.849667 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate 
expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-29 22:29:34.582325485 +0000 UTC Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.987608 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.987725 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"d9ed6afad2db01385c2a5a3ec60528543897e13a9e81a65436b3e660d6038a8e"} Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.987810 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"69264b9130879a7e1d9a6a12ec2a1f9210ba1d25e19dffa05ec70aec9bd35a9a"} Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.987844 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.987867 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"01af4039ed36e4babdbe2982d0acf73eab09079b5678273e490d3dc73f6db562"} Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.988616 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.988657 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:18 crc kubenswrapper[4703]: I0202 12:51:18.988676 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.850709 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 02:21:13.668386632 +0000 UTC Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.992084 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.994123 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592" exitCode=255 Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.994166 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592"} Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.994300 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.995678 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.995703 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.995712 4703 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.996171 4703 scope.go:117] "RemoveContainer" containerID="c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.998746 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"61cf67f8cfcf7b14af774a7ae02fea74b7a13dd5a0c5f4c02b916ffe181444d0"} Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.998794 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd/etcd-crc" event={"ID":"2139d3e2895fc6797b9c76a1b4c9886d","Type":"ContainerStarted","Data":"7dda63a16d941b1b01e17af6302247cd80145fff62a308f6c6d36cf3e953ebb2"} Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.998870 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.999477 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.999502 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:19 crc kubenswrapper[4703]: I0202 12:51:19.999510 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.121483 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Rotating certificates Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.252863 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.253016 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.254132 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.254163 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.254174 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.463159 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.464365 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.464403 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.464413 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.464437 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc" Feb 02 12:51:20 crc kubenswrapper[4703]: I0202 12:51:20.851689 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline 
is 2025-12-22 23:07:02.051487273 +0000 UTC Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.004893 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.006694 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca"} Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.006770 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.006821 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.010230 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.010294 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.010307 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.010262 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.010346 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.010367 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.336330 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-etcd/etcd-crc" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.453367 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.852628 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-03 03:15:13.457486778 +0000 UTC Feb 02 12:51:21 crc kubenswrapper[4703]: I0202 12:51:21.933241 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.008538 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.008570 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.008679 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.009593 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.009623 4703 kubelet_node_status.go:724] "Recording event message for 
node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.009635 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.010369 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.010389 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.010397 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:22 crc kubenswrapper[4703]: I0202 12:51:22.853619 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-29 11:15:23.890663861 +0000 UTC Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.011314 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.012710 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.012839 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.012946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.678011 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.678358 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.680044 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.680353 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.680536 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:23 crc kubenswrapper[4703]: I0202 12:51:23.853964 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 07:54:05.531211461 +0000 UTC Feb 02 12:51:24 crc kubenswrapper[4703]: E0202 12:51:24.156526 4703 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 12:51:24 crc kubenswrapper[4703]: I0202 12:51:24.854951 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 11:55:35.439853275 +0000 UTC Feb 02 12:51:25 crc kubenswrapper[4703]: I0202 12:51:25.855782 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-26 16:36:34.142540366 +0000 UTC Feb 02 12:51:25 crc kubenswrapper[4703]: I0202 12:51:25.869344 4703 kubelet.go:2542] 
"SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:25 crc kubenswrapper[4703]: I0202 12:51:25.869842 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:25 crc kubenswrapper[4703]: I0202 12:51:25.871533 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:25 crc kubenswrapper[4703]: I0202 12:51:25.871617 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:25 crc kubenswrapper[4703]: I0202 12:51:25.871637 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:26 crc kubenswrapper[4703]: I0202 12:51:26.658883 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:26 crc kubenswrapper[4703]: I0202 12:51:26.660217 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:26 crc kubenswrapper[4703]: I0202 12:51:26.661482 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:26 crc kubenswrapper[4703]: I0202 12:51:26.661516 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:26 crc kubenswrapper[4703]: I0202 12:51:26.661526 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:26 crc kubenswrapper[4703]: I0202 12:51:26.856591 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 20:20:54.206155794 +0000 UTC Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.648052 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.648262 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.651773 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.651835 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.651849 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.663424 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:27 crc kubenswrapper[4703]: I0202 12:51:27.857456 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-18 06:36:34.270485469 +0000 UTC Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.025933 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.027680 4703 kubelet_node_status.go:724] "Recording event message 
for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.028256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.028392 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.033487 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.215366 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-etcd/etcd-crc" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.215611 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.216787 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.216846 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.216863 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.260923 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-etcd/etcd-crc" Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.857979 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 18:09:56.663408847 +0000 UTC Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.870304 4703 patch_prober.go:28] interesting pod/kube-controller-manager-crc container/cluster-policy-controller namespace/openshift-kube-controller-manager: Startup probe status=failure output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body= Feb 02 12:51:28 crc kubenswrapper[4703]: I0202 12:51:28.870378 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podUID="f614b9022728cf315e60c057852e563e" containerName="cluster-policy-controller" probeResult="failure" output="Get \"https://192.168.126.11:10357/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.028030 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.028176 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.029224 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.029255 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.029263 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientPID" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.029350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.029382 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.029393 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.043895 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-etcd/etcd-crc" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.848334 4703 csi_plugin.go:884] Failed to contact API server when waiting for CSINode publishing: Get "https://api-int.crc.testing:6443/apis/storage.k8s.io/v1/csinodes/crc?resourceVersion=0": net/http: TLS handshake timeout Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.858546 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 00:05:19.245135368 +0000 UTC Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.922775 4703 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.922830 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.927159 4703 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 403" start-of-body={"kind":"Status","apiVersion":"v1","metadata":{},"status":"Failure","message":"forbidden: User \"system:anonymous\" cannot get path \"/livez\"","reason":"Forbidden","details":{},"code":403} Feb 02 12:51:29 crc kubenswrapper[4703]: I0202 12:51:29.927202 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 403" Feb 02 12:51:30 crc kubenswrapper[4703]: I0202 12:51:30.029894 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:30 crc kubenswrapper[4703]: I0202 12:51:30.030678 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:30 crc kubenswrapper[4703]: I0202 12:51:30.030794 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:30 crc kubenswrapper[4703]: I0202 12:51:30.030891 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:30 crc kubenswrapper[4703]: I0202 12:51:30.859228 4703 
certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 08:59:16.683443995 +0000 UTC Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.860167 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 23:49:15.391409826 +0000 UTC Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.937114 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.937239 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.937641 4703 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.937686 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.938479 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.938515 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.938527 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:31 crc kubenswrapper[4703]: I0202 12:51:31.940726 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 12:51:32.034662 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach" Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 12:51:32.035110 4703 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver-check-endpoints namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" start-of-body= Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 12:51:32.035182 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" probeResult="failure" output="Get \"https://192.168.126.11:17697/healthz\": dial tcp 192.168.126.11:17697: connect: connection refused" Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 12:51:32.035465 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 12:51:32.035495 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 
12:51:32.035503 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:32 crc kubenswrapper[4703]: I0202 12:51:32.860383 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-12 00:18:55.357185754 +0000 UTC Feb 02 12:51:33 crc kubenswrapper[4703]: I0202 12:51:33.861020 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 04:44:32.179015003 +0000 UTC Feb 02 12:51:34 crc kubenswrapper[4703]: E0202 12:51:34.156650 4703 eviction_manager.go:285] "Eviction manager: failed to get summary stats" err="failed to get node info: node \"crc\" not found" Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.861513 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-10 15:23:47.782076116 +0000 UTC Feb 02 12:51:34 crc kubenswrapper[4703]: E0202 12:51:34.915824 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": context deadline exceeded" interval="6.4s" Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.917671 4703 trace.go:236] Trace[781969204]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 12:51:20.766) (total time: 14151ms): Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[781969204]: ---"Objects listed" error: 14151ms (12:51:34.917) Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[781969204]: [14.151523843s] [14.151523843s] END Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.917704 4703 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 12:51:34 crc kubenswrapper[4703]: E0202 12:51:34.922054 4703 kubelet_node_status.go:99] "Unable to register node with API server" err="nodes \"crc\" is forbidden: autoscaling.openshift.io/ManagedNode infra config cache not synchronized" node="crc" Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.923157 4703 reconstruct.go:205] "DevicePaths of reconstructed volumes updated" Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.924457 4703 trace.go:236] Trace[265621428]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 12:51:20.650) (total time: 14273ms): Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[265621428]: ---"Objects listed" error: 14273ms (12:51:34.924) Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[265621428]: [14.273857071s] [14.273857071s] END Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.924503 4703 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.924732 4703 trace.go:236] Trace[1261992356]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 12:51:21.665) (total time: 13259ms): Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[1261992356]: ---"Objects listed" error: 13259ms (12:51:34.924) Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[1261992356]: [13.259262709s] [13.259262709s] END Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.924769 4703 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 12:51:34 crc 
kubenswrapper[4703]: I0202 12:51:34.924877 4703 trace.go:236] Trace[1142363142]: "Reflector ListAndWatch" name:k8s.io/client-go/informers/factory.go:160 (02-Feb-2026 12:51:21.070) (total time: 13854ms): Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[1142363142]: ---"Objects listed" error: 13854ms (12:51:34.924) Feb 02 12:51:34 crc kubenswrapper[4703]: Trace[1142363142]: [13.854732005s] [13.854732005s] END Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.924899 4703 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 12:51:34 crc kubenswrapper[4703]: I0202 12:51:34.943903 4703 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.407617 4703 csr.go:261] certificate signing request csr-t2hkb is approved, waiting to be issued Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.468929 4703 csr.go:257] certificate signing request csr-t2hkb is issued Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.840993 4703 apiserver.go:52] "Watching apiserver" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.844927 4703 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.845171 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-network-diagnostics/network-check-target-xd92c","openshift-network-node-identity/network-node-identity-vrzqb","openshift-network-operator/iptables-alerter-4ln5h","openshift-network-operator/network-operator-58b4c7f79c-55gtf","openshift-dns/node-resolver-279cn","openshift-network-console/networking-console-plugin-85b44fc459-gdk6g","openshift-network-diagnostics/network-check-source-55646444c4-trplf"] Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.845511 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.845557 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.845598 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.845725 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.845835 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.845943 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.845979 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.846228 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.846251 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.846300 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.847526 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.849367 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.849389 4703 desired_state_of_world_populator.go:154] "Finished populating initial desired state of world" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.849376 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.849599 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.849701 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.850251 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.850551 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.852568 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.855259 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.855385 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.855580 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.856308 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 12:51:35 crc 
kubenswrapper[4703]: I0202 12:51:35.863167 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 20:01:35.878991219 +0000 UTC Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.878208 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.879238 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.884723 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.890309 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.904567 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.917758 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.918375 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-daemon-2vnzs"] Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.918747 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.920767 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.921248 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.921289 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.921288 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.921329 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928310 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") pod \"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928357 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928388 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928409 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") pod 
\"5b88f790-22fa-440e-b583-365168c0b23d\" (UID: \"5b88f790-22fa-440e-b583-365168c0b23d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928434 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928457 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928477 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928496 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") pod \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\" (UID: \"cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928516 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928535 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928554 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928575 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928598 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928621 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: 
\"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928650 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928685 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928709 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928756 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928780 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928803 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928823 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928844 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928861 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928893 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume 
\"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928924 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928941 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.928958 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929077 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929099 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929119 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929137 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qg5z5\" (UniqueName: \"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929154 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929170 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929164 4703 
operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85" (OuterVolumeSpecName: "kube-api-access-x2m85") pod "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" (UID: "cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d"). InnerVolumeSpecName "kube-api-access-x2m85". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929189 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929238 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929257 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929290 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929308 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929330 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929346 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929364 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929400 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") pod 
\"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\" (UID: \"bd23aa5c-e532-4e53-bccf-e79f130c5ae8\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929416 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") pod \"7bb08738-c794-4ee8-9972-3a62ca171029\" (UID: \"7bb08738-c794-4ee8-9972-3a62ca171029\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929431 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929451 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929468 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929486 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929504 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929519 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929519 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls" (OuterVolumeSpecName: "machine-approver-tls") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "machine-approver-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929534 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929552 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929562 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929577 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929600 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929616 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929631 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") pod \"20b0d48f-5fd6-431c-a545-e3c800c7b866\" (UID: \"20b0d48f-5fd6-431c-a545-e3c800c7b866\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929646 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929660 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929675 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929692 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") pod \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\" (UID: \"96b93a3a-6083-4aea-8eab-fe1aa8245ad9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929708 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929725 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929732 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key" (OuterVolumeSpecName: "signing-key") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-key". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929741 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929818 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929845 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929884 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929910 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929930 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume 
started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929955 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929975 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929980 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.929995 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") pod \"1d611f23-29be-4491-8495-bee1670e935f\" (UID: \"1d611f23-29be-4491-8495-bee1670e935f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930016 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930037 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930058 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930086 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") pod \"4bb40260-dbaa-4fb0-84df-5e680505d512\" (UID: \"4bb40260-dbaa-4fb0-84df-5e680505d512\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930106 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " 
Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930122 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930138 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930160 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb" (OuterVolumeSpecName: "kube-api-access-279lb") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "kube-api-access-279lb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930162 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930195 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930232 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930254 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930291 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930295 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8" (OuterVolumeSpecName: "kube-api-access-wxkg8") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "kube-api-access-wxkg8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930341 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930368 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930367 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930387 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930407 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930426 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930444 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930464 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930480 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930496 4703 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930511 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930529 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930544 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930562 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") pod \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\" (UID: \"8cea82b4-6893-4ddc-af9f-1bb5ae425c5b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930565 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn" (OuterVolumeSpecName: "kube-api-access-jkwtn") pod "5b88f790-22fa-440e-b583-365168c0b23d" (UID: "5b88f790-22fa-440e-b583-365168c0b23d"). InnerVolumeSpecName "kube-api-access-jkwtn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930548 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images" (OuterVolumeSpecName: "images") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930580 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930691 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930723 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") pod \"49ef4625-1d3a-4a9f-b595-c2433d32326d\" (UID: \"49ef4625-1d3a-4a9f-b595-c2433d32326d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930745 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930754 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m" (OuterVolumeSpecName: "kube-api-access-gf66m") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). InnerVolumeSpecName "kube-api-access-gf66m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930762 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930786 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "metrics-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930806 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930833 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930856 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930883 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930906 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930928 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") pod \"44663579-783b-4372-86d6-acf235a62d72\" (UID: \"44663579-783b-4372-86d6-acf235a62d72\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930950 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930974 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.930997 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931017 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") pod 
\"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931040 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") pod \"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931042 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config" (OuterVolumeSpecName: "console-oauth-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-oauth-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931063 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-htfz6\" (UniqueName: \"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931086 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931107 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931134 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931155 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zgdk5\" (UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931176 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931185 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-script-lib". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931200 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931226 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") pod \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\" (UID: \"3cb93b32-e0ae-4377-b9c8-fdb9842c6d59\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931249 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") pod \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\" (UID: \"3ab1a177-2de0-46d9-b765-d0d0649bb42e\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931294 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931318 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931341 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931363 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") pod \"fda69060-fa79-4696-b1a6-7980f124bf7c\" (UID: \"fda69060-fa79-4696-b1a6-7980f124bf7c\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931388 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") pod \"01ab3dd5-8196-46d0-ad33-122e2ca51def\" (UID: \"01ab3dd5-8196-46d0-ad33-122e2ca51def\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931412 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931437 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") pod 
\"5225d0e4-402f-4861-b410-819f433b1803\" (UID: \"5225d0e4-402f-4861-b410-819f433b1803\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931460 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931484 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") pod \"a31745f5-9847-4afe-82a5-3161cc66ca93\" (UID: \"a31745f5-9847-4afe-82a5-3161cc66ca93\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931509 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") pod \"496e6271-fb68-4057-954e-a0d97a4afa3f\" (UID: \"496e6271-fb68-4057-954e-a0d97a4afa3f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931531 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931554 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931579 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931605 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") pod \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\" (UID: \"f88749ec-7931-4ee7-b3fc-1ec5e11f92e9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931631 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931671 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931698 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931725 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931753 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931777 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931802 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931823 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931854 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931875 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") pod \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\" (UID: \"a0128f3a-b052-44ed-a84e-c4c8aaf17c13\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931901 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931926 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") pod \"6731426b-95fe-49ff-bb5f-40441049fde2\" (UID: \"6731426b-95fe-49ff-bb5f-40441049fde2\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931950 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for 
volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931984 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") pod \"e7e6199b-1264-4501-8953-767f51328d08\" (UID: \"e7e6199b-1264-4501-8953-767f51328d08\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932011 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932036 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") pod \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\" (UID: \"09ae3b1a-e8e7-4524-b54b-61eab6f9239a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932066 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") pod \"09efc573-dbb6-4249-bd59-9b87aba8dd28\" (UID: \"09efc573-dbb6-4249-bd59-9b87aba8dd28\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932054 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932090 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932136 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932160 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932181 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932202 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932225 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932248 4703 reconciler_common.go:159] 
"operationExecutor.UnmountVolume started for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932324 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932351 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") pod \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\" (UID: \"1386a44e-36a2-460c-96d0-0359d2b6f0f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932380 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") pod \"31d8b7a1-420e-4252-a5b7-eebe8a111292\" (UID: \"31d8b7a1-420e-4252-a5b7-eebe8a111292\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932408 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") pod \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\" (UID: \"210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932436 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") pod \"5441d097-087c-4d9a-baa8-b210afa90fc9\" (UID: \"5441d097-087c-4d9a-baa8-b210afa90fc9\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932764 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") pod \"87cf06ed-a83f-41a7-828d-70653580a8cb\" (UID: \"87cf06ed-a83f-41a7-828d-70653580a8cb\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932796 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") pod \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\" (UID: \"308be0ea-9f5f-4b29-aeb1-5abd31a0b17b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932823 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932850 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") pod \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\" (UID: \"bc5039c0-ea34-426b-a2b7-fbbc87b49a6d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932872 
4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") pod \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\" (UID: \"25e176fe-21b4-4974-b1ed-c8b94f112a7f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932893 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") pod \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\" (UID: \"b11524ee-3fca-4b1b-9cdf-6da289fdbc7d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932918 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932944 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932971 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") pod \"22c825df-677d-4ca6-82db-3454ed06e783\" (UID: \"22c825df-677d-4ca6-82db-3454ed06e783\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932999 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") pod \"57a731c4-ef35-47a8-b875-bfb08a7f8011\" (UID: \"57a731c4-ef35-47a8-b875-bfb08a7f8011\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933032 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933065 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") pod \"bf126b07-da06-4140-9a57-dfd54fc6b486\" (UID: \"bf126b07-da06-4140-9a57-dfd54fc6b486\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933088 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") pod \"0b78653f-4ff9-4508-8672-245ed9b561e3\" (UID: \"0b78653f-4ff9-4508-8672-245ed9b561e3\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933109 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") pod \"6ea678ab-3438-413e-bfe3-290ae7725660\" (UID: \"6ea678ab-3438-413e-bfe3-290ae7725660\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933133 4703 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933162 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933190 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") pod \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\" (UID: \"b6cd30de-2eeb-49a2-ab40-9167f4560ff5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933216 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") pod \"1bf7eb37-55a3-4c65-b768-a94c82151e69\" (UID: \"1bf7eb37-55a3-4c65-b768-a94c82151e69\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933242 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") pod \"5fe579f8-e8a6-4643-bce5-a661393c4dde\" (UID: \"5fe579f8-e8a6-4643-bce5-a661393c4dde\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933283 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") pod \"efdd0498-1daa-4136-9a4a-3b948c2293fc\" (UID: \"efdd0498-1daa-4136-9a4a-3b948c2293fc\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933311 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") pod \"6402fda4-df10-493c-b4e5-d0569419652d\" (UID: \"6402fda4-df10-493c-b4e5-d0569419652d\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933340 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") pod \"7539238d-5fe0-46ed-884e-1c3b566537ec\" (UID: \"7539238d-5fe0-46ed-884e-1c3b566537ec\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933366 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") pod \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\" (UID: \"49c341d1-5089-4bc2-86a0-a5e165cfcc6b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933393 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: 
\"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933420 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") pod \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\" (UID: \"b6312bbd-5731-4ea0-a20f-81d5a57df44a\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933448 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") pod \"43509403-f426-496e-be36-56cef71462f5\" (UID: \"43509403-f426-496e-be36-56cef71462f5\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933477 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") pod \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\" (UID: \"c03ee662-fb2f-4fc4-a2c1-af487c19d254\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933502 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") pod \"7583ce53-e0fe-4a16-9e4d-50516596a136\" (UID: \"7583ce53-e0fe-4a16-9e4d-50516596a136\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933526 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") pod \"0b574797-001e-440a-8f4e-c0be86edad0f\" (UID: \"0b574797-001e-440a-8f4e-c0be86edad0f\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933551 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") pod \"6509e943-70c6-444c-bc41-48a544e36fbd\" (UID: \"6509e943-70c6-444c-bc41-48a544e36fbd\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933578 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") pod \"9d4552c7-cd75-42dd-8880-30dd377c49a4\" (UID: \"9d4552c7-cd75-42dd-8880-30dd377c49a4\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933603 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") pod \"925f1c65-6136-48ba-85aa-3a3b50560753\" (UID: \"925f1c65-6136-48ba-85aa-3a3b50560753\") " Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933659 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933691 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod 
\"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933719 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933744 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933771 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933801 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8-hosts-file\") pod \"node-resolver-279cn\" (UID: \"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\") " pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933827 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933853 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933877 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k6txp\" (UniqueName: \"kubernetes.io/projected/66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8-kube-api-access-k6txp\") pod \"node-resolver-279cn\" (UID: \"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\") " pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933902 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933928 
4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933955 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933987 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934018 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934048 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934075 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934116 4703 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5b88f790-22fa-440e-b583-365168c0b23d-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934134 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wxkg8\" (UniqueName: \"kubernetes.io/projected/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-kube-api-access-wxkg8\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934150 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jkwtn\" (UniqueName: \"kubernetes.io/projected/5b88f790-22fa-440e-b583-365168c0b23d-kube-api-access-jkwtn\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934165 4703 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc 
kubenswrapper[4703]: I0202 12:51:35.934180 4703 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934197 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2m85\" (UniqueName: \"kubernetes.io/projected/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d-kube-api-access-x2m85\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934211 4703 reconciler_common.go:293] "Volume detached for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/22c825df-677d-4ca6-82db-3454ed06e783-machine-approver-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934225 4703 reconciler_common.go:293] "Volume detached for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-key\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934241 4703 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-images\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934257 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-279lb\" (UniqueName: \"kubernetes.io/projected/7bb08738-c794-4ee8-9972-3a62ca171029-kube-api-access-279lb\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934288 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934305 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934321 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gf66m\" (UniqueName: \"kubernetes.io/projected/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-kube-api-access-gf66m\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934337 4703 reconciler_common.go:293] "Volume detached for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-oauth-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931385 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs" (OuterVolumeSpecName: "tmpfs") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "tmpfs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931539 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). 
InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931677 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.931931 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932028 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v" (OuterVolumeSpecName: "kube-api-access-pjr6v") pod "49ef4625-1d3a-4a9f-b595-c2433d32326d" (UID: "49ef4625-1d3a-4a9f-b595-c2433d32326d"). InnerVolumeSpecName "kube-api-access-pjr6v". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.957878 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate" (OuterVolumeSpecName: "default-certificate") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "default-certificate". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932178 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932545 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932704 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls" (OuterVolumeSpecName: "control-plane-machine-set-operator-tls") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "control-plane-machine-set-operator-tls". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932899 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.932949 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933044 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-cliconfig". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933113 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4" (OuterVolumeSpecName: "kube-api-access-w4xd4") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "kube-api-access-w4xd4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933258 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config" (OuterVolumeSpecName: "config") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933405 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert" (OuterVolumeSpecName: "package-server-manager-serving-cert") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "package-server-manager-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933569 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933716 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config" (OuterVolumeSpecName: "config") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). 
InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933881 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j" (OuterVolumeSpecName: "kube-api-access-w7l8j") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "kube-api-access-w7l8j". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933957 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config" (OuterVolumeSpecName: "mcc-auth-proxy-config") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "mcc-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.933988 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz" (OuterVolumeSpecName: "kube-api-access-6g6sz") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "kube-api-access-6g6sz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934079 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config" (OuterVolumeSpecName: "console-config") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934176 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "496e6271-fb68-4057-954e-a0d97a4afa3f" (UID: "496e6271-fb68-4057-954e-a0d97a4afa3f"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934292 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934296 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7" (OuterVolumeSpecName: "kube-api-access-sb6h7") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "kube-api-access-sb6h7". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934583 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934620 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88" (OuterVolumeSpecName: "kube-api-access-lzf88") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "kube-api-access-lzf88". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.934751 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7" (OuterVolumeSpecName: "kube-api-access-9xfj7") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "kube-api-access-9xfj7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.935114 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.935144 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52" (OuterVolumeSpecName: "kube-api-access-s4n52") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "kube-api-access-s4n52". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.935257 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.935329 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "trusted-ca-bundle". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.935484 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates" (OuterVolumeSpecName: "available-featuregates") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "available-featuregates". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.935956 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert" (OuterVolumeSpecName: "cert") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.936306 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk" (OuterVolumeSpecName: "kube-api-access-rnphk") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "kube-api-access-rnphk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.936328 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh" (OuterVolumeSpecName: "kube-api-access-xcgwh") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "kube-api-access-xcgwh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.936561 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.936799 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937121 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovn-node-metrics-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937143 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "srv-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937322 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config" (OuterVolumeSpecName: "config") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937541 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937628 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937863 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert" (OuterVolumeSpecName: "srv-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "srv-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.937924 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls" (OuterVolumeSpecName: "machine-api-operator-tls") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "machine-api-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.938397 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc" (OuterVolumeSpecName: "kube-api-access-vt5rc") pod "44663579-783b-4372-86d6-acf235a62d72" (UID: "44663579-783b-4372-86d6-acf235a62d72"). InnerVolumeSpecName "kube-api-access-vt5rc". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.938433 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config" (OuterVolumeSpecName: "config") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.938514 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-router-certs". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.938810 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj" (OuterVolumeSpecName: "kube-api-access-4d4hj") pod "3ab1a177-2de0-46d9-b765-d0d0649bb42e" (UID: "3ab1a177-2de0-46d9-b765-d0d0649bb42e"). InnerVolumeSpecName "kube-api-access-4d4hj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.939020 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz" (OuterVolumeSpecName: "kube-api-access-8tdtz") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "kube-api-access-8tdtz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.942230 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls" (OuterVolumeSpecName: "image-registry-operator-tls") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "image-registry-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.942488 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.942593 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.951821 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config" (OuterVolumeSpecName: "config") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958628 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs" (OuterVolumeSpecName: "webhook-certs") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "webhook-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.952156 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). 
InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.952493 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "audit-policies". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.952608 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle" (OuterVolumeSpecName: "service-ca-bundle") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "service-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.952631 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca" (OuterVolumeSpecName: "etcd-serving-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-serving-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.951001 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca" (OuterVolumeSpecName: "service-ca") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.952976 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs" (OuterVolumeSpecName: "metrics-certs") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "metrics-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.952966 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca" (OuterVolumeSpecName: "client-ca") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953171 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7" (OuterVolumeSpecName: "kube-api-access-nzwt7") pod "96b93a3a-6083-4aea-8eab-fe1aa8245ad9" (UID: "96b93a3a-6083-4aea-8eab-fe1aa8245ad9"). InnerVolumeSpecName "kube-api-access-nzwt7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.951188 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953216 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953263 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert" (OuterVolumeSpecName: "oauth-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "oauth-serving-cert". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953301 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953427 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953650 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.953852 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6" (OuterVolumeSpecName: "kube-api-access-htfz6") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "kube-api-access-htfz6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.954112 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert" (OuterVolumeSpecName: "ovn-control-plane-metrics-cert") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovn-control-plane-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.954144 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.954399 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs" (OuterVolumeSpecName: "kube-api-access-pcxfs") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "kube-api-access-pcxfs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958834 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh" (OuterVolumeSpecName: "kube-api-access-x4zgh") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "kube-api-access-x4zgh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.954524 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" (UID: "8cea82b4-6893-4ddc-af9f-1bb5ae425c5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.954739 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv" (OuterVolumeSpecName: "kube-api-access-zkvpv") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "kube-api-access-zkvpv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.955246 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds" (OuterVolumeSpecName: "kube-api-access-w9rds") pod "20b0d48f-5fd6-431c-a545-e3c800c7b866" (UID: "20b0d48f-5fd6-431c-a545-e3c800c7b866"). InnerVolumeSpecName "kube-api-access-w9rds". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.955307 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958870 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config" (OuterVolumeSpecName: "config") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.955467 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config" (OuterVolumeSpecName: "encryption-config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "encryption-config". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.955482 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp" (OuterVolumeSpecName: "kube-api-access-ngvvp") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "kube-api-access-ngvvp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.955564 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5" (OuterVolumeSpecName: "kube-api-access-zgdk5") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "kube-api-access-zgdk5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.955603 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:51:36.454522265 +0000 UTC m=+23.469729799 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958373 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token" (OuterVolumeSpecName: "node-bootstrap-token") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "node-bootstrap-token". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958375 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl" (OuterVolumeSpecName: "kube-api-access-xcphl") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "kube-api-access-xcphl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958372 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg" (OuterVolumeSpecName: "kube-api-access-dbsvg") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). 
InnerVolumeSpecName "kube-api-access-dbsvg". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.951839 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.958669 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782" (OuterVolumeSpecName: "kube-api-access-pj782") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "kube-api-access-pj782". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.954504 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959243 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959459 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt" (OuterVolumeSpecName: "kube-api-access-fqsjt") pod "efdd0498-1daa-4136-9a4a-3b948c2293fc" (UID: "efdd0498-1daa-4136-9a4a-3b948c2293fc"). InnerVolumeSpecName "kube-api-access-fqsjt". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959267 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config" (OuterVolumeSpecName: "config") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959498 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh" (OuterVolumeSpecName: "kube-api-access-x7zkh") pod "6731426b-95fe-49ff-bb5f-40441049fde2" (UID: "6731426b-95fe-49ff-bb5f-40441049fde2"). InnerVolumeSpecName "kube-api-access-x7zkh". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959692 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959745 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8" (OuterVolumeSpecName: "kube-api-access-6ccd8") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "kube-api-access-6ccd8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959797 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv" (OuterVolumeSpecName: "kube-api-access-d4lsv") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "kube-api-access-d4lsv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959819 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb" (OuterVolumeSpecName: "kube-api-access-mg5zb") pod "6402fda4-df10-493c-b4e5-d0569419652d" (UID: "6402fda4-df10-493c-b4e5-d0569419652d"). InnerVolumeSpecName "kube-api-access-mg5zb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.959885 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "bf126b07-da06-4140-9a57-dfd54fc6b486" (UID: "bf126b07-da06-4140-9a57-dfd54fc6b486"). InnerVolumeSpecName "bound-sa-token". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.960261 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities" (OuterVolumeSpecName: "utilities") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.960030 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities" (OuterVolumeSpecName: "utilities") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.960669 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.961180 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls" (OuterVolumeSpecName: "metrics-tls") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "metrics-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.961365 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.961373 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.961640 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.961964 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.962067 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle" (OuterVolumeSpecName: "signing-cabundle") pod "25e176fe-21b4-4974-b1ed-c8b94f112a7f" (UID: "25e176fe-21b4-4974-b1ed-c8b94f112a7f"). InnerVolumeSpecName "signing-cabundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.962246 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). 
InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.962492 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "925f1c65-6136-48ba-85aa-3a3b50560753" (UID: "925f1c65-6136-48ba-85aa-3a3b50560753"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.963922 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e7e6199b-1264-4501-8953-767f51328d08" (UID: "e7e6199b-1264-4501-8953-767f51328d08"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.964448 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config" (OuterVolumeSpecName: "config") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.964705 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf" (OuterVolumeSpecName: "kube-api-access-7c4vf") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "kube-api-access-7c4vf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.964877 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.964914 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca" (OuterVolumeSpecName: "service-ca") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.965007 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:36.464980395 +0000 UTC m=+23.480187929 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.965246 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.965085 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.965787 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume" (OuterVolumeSpecName: "config-volume") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.965951 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" (UID: "308be0ea-9f5f-4b29-aeb1-5abd31a0b17b"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.966555 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-identity-cm\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-ovnkube-identity-cm\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.967664 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "b6cd30de-2eeb-49a2-ab40-9167f4560ff5" (UID: "b6cd30de-2eeb-49a2-ab40-9167f4560ff5"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.967871 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config" (OuterVolumeSpecName: "multus-daemon-config") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "multus-daemon-config". 
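The MountVolume.SetUp failures above for networking-console-plugin-85b44fc459-gdk6g all share one cause: the Secret and ConfigMap source objects are reported as not registered with the kubelet yet. A minimal sketch of a log filter that tallies which objects the log reports as not registered, reading the log on stdin; the regular expression is an assumption derived from the message format seen in these lines:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the kubelet message format seen in this log, e.g.
//   object "openshift-network-console"/"networking-console-plugin-cert" not registered
// The pattern is an assumption derived from these lines only.
var notRegistered = regexp.MustCompile(`object "([^"]+)"/"([^"]+)" not registered`)

func main() {
	counts := map[string]int{}
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // kubelet log lines can be very long
	for sc.Scan() {
		for _, m := range notRegistered.FindAllStringSubmatch(sc.Text(), -1) {
			counts[m[1]+"/"+m[2]]++ // namespace/name
		}
	}
	for obj, n := range counts {
		fmt.Printf("%5d  %s\n", n, obj)
	}
}

Run as, for example, "go run tally.go < kubelet.log" (file and program names hypothetical); on this excerpt it would surface the openshift-network-console and openshift-network-diagnostics objects as the repeat offenders.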
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.967970 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx" (OuterVolumeSpecName: "kube-api-access-d6qdx") pod "87cf06ed-a83f-41a7-828d-70653580a8cb" (UID: "87cf06ed-a83f-41a7-828d-70653580a8cb"). InnerVolumeSpecName "kube-api-access-d6qdx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.969008 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz" (OuterVolumeSpecName: "kube-api-access-bf2bz") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "kube-api-access-bf2bz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.969411 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b88f790-22fa-440e-b583-365168c0b23d" path="/var/lib/kubelet/pods/5b88f790-22fa-440e-b583-365168c0b23d/volumes" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.969480 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.969547 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca" (OuterVolumeSpecName: "serviceca") pod "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" (UID: "3cb93b32-e0ae-4377-b9c8-fdb9842c6d59"). InnerVolumeSpecName "serviceca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.969588 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config" (OuterVolumeSpecName: "config") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.970851 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist" (OuterVolumeSpecName: "cni-sysctl-allowlist") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-sysctl-allowlist". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.971373 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config" (OuterVolumeSpecName: "config") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.971977 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.972412 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.972513 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle" (OuterVolumeSpecName: "trusted-ca-bundle") pod "09ae3b1a-e8e7-4524-b54b-61eab6f9239a" (UID: "09ae3b1a-e8e7-4524-b54b-61eab6f9239a"). InnerVolumeSpecName "trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.972589 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.972606 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.973935 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca" (OuterVolumeSpecName: "client-ca") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.973687 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.969708 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2" (OuterVolumeSpecName: "kube-api-access-jhbk2") pod "bd23aa5c-e532-4e53-bccf-e79f130c5ae8" (UID: "bd23aa5c-e532-4e53-bccf-e79f130c5ae8"). InnerVolumeSpecName "kube-api-access-jhbk2". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.971199 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca" (OuterVolumeSpecName: "etcd-service-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.972982 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca" (OuterVolumeSpecName: "image-import-ca") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "image-import-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975360 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975402 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client" (OuterVolumeSpecName: "etcd-client") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-client". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975422 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config" (OuterVolumeSpecName: "config") pod "7583ce53-e0fe-4a16-9e4d-50516596a136" (UID: "7583ce53-e0fe-4a16-9e4d-50516596a136"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975448 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config" (OuterVolumeSpecName: "config") pod "22c825df-677d-4ca6-82db-3454ed06e783" (UID: "22c825df-677d-4ca6-82db-3454ed06e783"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975839 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975870 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.976050 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh" (OuterVolumeSpecName: "kube-api-access-2w9zh") pod "4bb40260-dbaa-4fb0-84df-5e680505d512" (UID: "4bb40260-dbaa-4fb0-84df-5e680505d512"). InnerVolumeSpecName "kube-api-access-2w9zh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.976182 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca" (OuterVolumeSpecName: "etcd-ca") pod "09efc573-dbb6-4249-bd59-9b87aba8dd28" (UID: "09efc573-dbb6-4249-bd59-9b87aba8dd28"). InnerVolumeSpecName "etcd-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.976806 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd" (OuterVolumeSpecName: "kube-api-access-mnrrd") pod "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" (UID: "bc5039c0-ea34-426b-a2b7-fbbc87b49a6d"). InnerVolumeSpecName "kube-api-access-mnrrd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.975725 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5" (OuterVolumeSpecName: "kube-api-access-qg5z5") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "kube-api-access-qg5z5". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.977667 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.977826 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf" (OuterVolumeSpecName: "kube-api-access-v47cf") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "kube-api-access-v47cf". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.977843 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c" (OuterVolumeSpecName: "kube-api-access-tk88c") pod "7539238d-5fe0-46ed-884e-1c3b566537ec" (UID: "7539238d-5fe0-46ed-884e-1c3b566537ec"). InnerVolumeSpecName "kube-api-access-tk88c". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.977867 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn" (OuterVolumeSpecName: "kube-api-access-lz9wn") pod "a31745f5-9847-4afe-82a5-3161cc66ca93" (UID: "a31745f5-9847-4afe-82a5-3161cc66ca93"). InnerVolumeSpecName "kube-api-access-lz9wn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.977881 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities" (OuterVolumeSpecName: "utilities") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.978268 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs" (OuterVolumeSpecName: "certs") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.978430 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "0b78653f-4ff9-4508-8672-245ed9b561e3" (UID: "0b78653f-4ff9-4508-8672-245ed9b561e3"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.978455 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy" (OuterVolumeSpecName: "cni-binary-copy") pod "7bb08738-c794-4ee8-9972-3a62ca171029" (UID: "7bb08738-c794-4ee8-9972-3a62ca171029"). InnerVolumeSpecName "cni-binary-copy". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.978492 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config" (OuterVolumeSpecName: "mcd-auth-proxy-config") pod "fda69060-fa79-4696-b1a6-7980f124bf7c" (UID: "fda69060-fa79-4696-b1a6-7980f124bf7c"). InnerVolumeSpecName "mcd-auth-proxy-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.978781 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp" (OuterVolumeSpecName: "kube-api-access-fcqwp") pod "5fe579f8-e8a6-4643-bce5-a661393c4dde" (UID: "5fe579f8-e8a6-4643-bce5-a661393c4dde"). InnerVolumeSpecName "kube-api-access-fcqwp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.978945 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls" (OuterVolumeSpecName: "samples-operator-tls") pod "a0128f3a-b052-44ed-a84e-c4c8aaf17c13" (UID: "a0128f3a-b052-44ed-a84e-c4c8aaf17c13"). 
InnerVolumeSpecName "samples-operator-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.976109 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities" (OuterVolumeSpecName: "utilities") pod "1d611f23-29be-4491-8495-bee1670e935f" (UID: "1d611f23-29be-4491-8495-bee1670e935f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.979103 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.979117 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config" (OuterVolumeSpecName: "config") pod "01ab3dd5-8196-46d0-ad33-122e2ca51def" (UID: "01ab3dd5-8196-46d0-ad33-122e2ca51def"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.979349 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/ef543e1b-8068-4ea3-b32a-61027b32e95d-env-overrides\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.980301 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"iptables-alerter-script\" (UniqueName: \"kubernetes.io/configmap/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-iptables-alerter-script\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.980815 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert" (OuterVolumeSpecName: "profile-collector-cert") pod "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" (UID: "f88749ec-7931-4ee7-b3fc-1ec5e11f92e9"). InnerVolumeSpecName "profile-collector-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.981162 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config" (OuterVolumeSpecName: "config") pod "1386a44e-36a2-460c-96d0-0359d2b6f0f5" (UID: "1386a44e-36a2-460c-96d0-0359d2b6f0f5"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.981322 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images" (OuterVolumeSpecName: "images") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "images". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.981505 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz" (OuterVolumeSpecName: "kube-api-access-2d4wz") pod "5441d097-087c-4d9a-baa8-b210afa90fc9" (UID: "5441d097-087c-4d9a-baa8-b210afa90fc9"). InnerVolumeSpecName "kube-api-access-2d4wz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.982491 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "6ea678ab-3438-413e-bfe3-290ae7725660" (UID: "6ea678ab-3438-413e-bfe3-290ae7725660"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.982496 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct" (OuterVolumeSpecName: "kube-api-access-cfbct") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "kube-api-access-cfbct". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.982547 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7" (OuterVolumeSpecName: "kube-api-access-kfwg7") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "kube-api-access-kfwg7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.982533 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.983385 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr" (OuterVolumeSpecName: "kube-api-access-249nr") pod "b6312bbd-5731-4ea0-a20f-81d5a57df44a" (UID: "b6312bbd-5731-4ea0-a20f-81d5a57df44a"). InnerVolumeSpecName "kube-api-access-249nr". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: E0202 12:51:35.983982 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:36.483954873 +0000 UTC m=+23.499162407 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.985107 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert" (OuterVolumeSpecName: "console-serving-cert") pod "43509403-f426-496e-be36-56cef71462f5" (UID: "43509403-f426-496e-be36-56cef71462f5"). InnerVolumeSpecName "console-serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.985445 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6509e943-70c6-444c-bc41-48a544e36fbd" (UID: "6509e943-70c6-444c-bc41-48a544e36fbd"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.985442 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth" (OuterVolumeSpecName: "stats-auth") pod "c03ee662-fb2f-4fc4-a2c1-af487c19d254" (UID: "c03ee662-fb2f-4fc4-a2c1-af487c19d254"). InnerVolumeSpecName "stats-auth". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.985726 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.985824 4703 swap_util.go:74] "error creating dir to test if tmpfs noswap is enabled. Assuming not supported" mount path="" error="stat /var/lib/kubelet/plugins/kubernetes.io/empty-dir: no such file or directory" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.985452 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls" (OuterVolumeSpecName: "proxy-tls") pod "0b574797-001e-440a-8f4e-c0be86edad0f" (UID: "0b574797-001e-440a-8f4e-c0be86edad0f"). InnerVolumeSpecName "proxy-tls". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.986370 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config" (OuterVolumeSpecName: "auth-proxy-config") pod "31d8b7a1-420e-4252-a5b7-eebe8a111292" (UID: "31d8b7a1-420e-4252-a5b7-eebe8a111292"). InnerVolumeSpecName "auth-proxy-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.986619 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config" (OuterVolumeSpecName: "config") pod "9d4552c7-cd75-42dd-8880-30dd377c49a4" (UID: "9d4552c7-cd75-42dd-8880-30dd377c49a4"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.986922 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "49c341d1-5089-4bc2-86a0-a5e165cfcc6b" (UID: "49c341d1-5089-4bc2-86a0-a5e165cfcc6b"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.986976 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp" (OuterVolumeSpecName: "kube-api-access-qs4fp") pod "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" (UID: "210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c"). InnerVolumeSpecName "kube-api-access-qs4fp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.990973 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit" (OuterVolumeSpecName: "audit") pod "1bf7eb37-55a3-4c65-b768-a94c82151e69" (UID: "1bf7eb37-55a3-4c65-b768-a94c82151e69"). InnerVolumeSpecName "audit". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.996075 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "57a731c4-ef35-47a8-b875-bfb08a7f8011" (UID: "57a731c4-ef35-47a8-b875-bfb08a7f8011"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.999456 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/ef543e1b-8068-4ea3-b32a-61027b32e95d-webhook-cert\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:35 crc kubenswrapper[4703]: I0202 12:51:35.999651 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.000955 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.001496 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.001670 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.002481 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.002514 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.002534 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.003141 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:36.503092347 +0000 UTC m=+23.518299991 (durationBeforeRetry 500ms). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.003194 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:36.50318218 +0000 UTC m=+23.518389714 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.008320 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rczfb\" (UniqueName: \"kubernetes.io/projected/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-kube-api-access-rczfb\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.010799 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5fe579f8-e8a6-4643-bce5-a661393c4dde" path="/var/lib/kubelet/pods/5fe579f8-e8a6-4643-bce5-a661393c4dde/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.010907 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.011176 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" (UID: "b11524ee-3fca-4b1b-9cdf-6da289fdbc7d"). InnerVolumeSpecName "catalog-content". 
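The status-patch failures in this stretch of the log share one proximate cause: every pod status update is routed through the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743, and nothing is listening there yet. A minimal sketch of a TCP reachability probe for that endpoint, to be run on the node itself; the address is taken from the log's error text and the 2-second timeout is an arbitrary choice, so this reproduces the "connect: connection refused" symptom rather than diagnosing its root cause:

package main

import (
	"fmt"
	"net"
	"time"
)

func main() {
	// Endpoint as reported in the log's "failed to call webhook" errors.
	addr := "127.0.0.1:9743"
	conn, err := net.DialTimeout("tcp", addr, 2*time.Second)
	if err != nil {
		// With no webhook server running this prints something like:
		//   webhook unreachable: dial tcp 127.0.0.1:9743: connect: connection refused
		fmt.Println("webhook unreachable:", err)
		return
	}
	conn.Close()
	fmt.Println("webhook port is accepting connections")
}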
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.011671 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.012056 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2kz5\" (UniqueName: \"kubernetes.io/projected/ef543e1b-8068-4ea3-b32a-61027b32e95d-kube-api-access-s2kz5\") pod \"network-node-identity-vrzqb\" (UID: \"ef543e1b-8068-4ea3-b32a-61027b32e95d\") " pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.012687 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/37a5e44f-9a88-4405-be8a-b645485e7312-metrics-tls\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.012696 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6402fda4-df10-493c-b4e5-d0569419652d" 
path="/var/lib/kubelet/pods/6402fda4-df10-493c-b4e5-d0569419652d/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.013845 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6509e943-70c6-444c-bc41-48a544e36fbd" path="/var/lib/kubelet/pods/6509e943-70c6-444c-bc41-48a544e36fbd/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.015128 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6731426b-95fe-49ff-bb5f-40441049fde2" path="/var/lib/kubelet/pods/6731426b-95fe-49ff-bb5f-40441049fde2/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.016366 4703 kubelet_volumes.go:152] "Cleaned up orphaned volume subpath from pod" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volume-subpaths/run-systemd/ovnkube-controller/6" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.016385 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5225d0e4-402f-4861-b410-819f433b1803" (UID: "5225d0e4-402f-4861-b410-819f433b1803"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.016487 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6ea678ab-3438-413e-bfe3-290ae7725660" path="/var/lib/kubelet/pods/6ea678ab-3438-413e-bfe3-290ae7725660/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.018173 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7539238d-5fe0-46ed-884e-1c3b566537ec" path="/var/lib/kubelet/pods/7539238d-5fe0-46ed-884e-1c3b566537ec/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.018756 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7583ce53-e0fe-4a16-9e4d-50516596a136" path="/var/lib/kubelet/pods/7583ce53-e0fe-4a16-9e4d-50516596a136/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.019609 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7bb08738-c794-4ee8-9972-3a62ca171029" path="/var/lib/kubelet/pods/7bb08738-c794-4ee8-9972-3a62ca171029/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.021638 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="87cf06ed-a83f-41a7-828d-70653580a8cb" path="/var/lib/kubelet/pods/87cf06ed-a83f-41a7-828d-70653580a8cb/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.021710 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rdwmf\" (UniqueName: \"kubernetes.io/projected/37a5e44f-9a88-4405-be8a-b645485e7312-kube-api-access-rdwmf\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.022528 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.022694 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8cea82b4-6893-4ddc-af9f-1bb5ae425c5b" path="/var/lib/kubelet/pods/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.023668 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="925f1c65-6136-48ba-85aa-3a3b50560753" path="/var/lib/kubelet/pods/925f1c65-6136-48ba-85aa-3a3b50560753/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.024373 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="96b93a3a-6083-4aea-8eab-fe1aa8245ad9" path="/var/lib/kubelet/pods/96b93a3a-6083-4aea-8eab-fe1aa8245ad9/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.025470 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9d4552c7-cd75-42dd-8880-30dd377c49a4" path="/var/lib/kubelet/pods/9d4552c7-cd75-42dd-8880-30dd377c49a4/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.025965 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a0128f3a-b052-44ed-a84e-c4c8aaf17c13" path="/var/lib/kubelet/pods/a0128f3a-b052-44ed-a84e-c4c8aaf17c13/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.027038 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a31745f5-9847-4afe-82a5-3161cc66ca93" path="/var/lib/kubelet/pods/a31745f5-9847-4afe-82a5-3161cc66ca93/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.027778 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b11524ee-3fca-4b1b-9cdf-6da289fdbc7d" path="/var/lib/kubelet/pods/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.029300 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6312bbd-5731-4ea0-a20f-81d5a57df44a" path="/var/lib/kubelet/pods/b6312bbd-5731-4ea0-a20f-81d5a57df44a/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.030131 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6cd30de-2eeb-49a2-ab40-9167f4560ff5" path="/var/lib/kubelet/pods/b6cd30de-2eeb-49a2-ab40-9167f4560ff5/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.031409 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bc5039c0-ea34-426b-a2b7-fbbc87b49a6d" path="/var/lib/kubelet/pods/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d/volumes" Feb 02 12:51:36 crc 
kubenswrapper[4703]: I0202 12:51:36.031550 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.032009 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bd23aa5c-e532-4e53-bccf-e79f130c5ae8" path="/var/lib/kubelet/pods/bd23aa5c-e532-4e53-bccf-e79f130c5ae8/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.033663 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="bf126b07-da06-4140-9a57-dfd54fc6b486" path="/var/lib/kubelet/pods/bf126b07-da06-4140-9a57-dfd54fc6b486/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.034367 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c03ee662-fb2f-4fc4-a2c1-af487c19d254" path="/var/lib/kubelet/pods/c03ee662-fb2f-4fc4-a2c1-af487c19d254/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035639 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d" path="/var/lib/kubelet/pods/cd70aa09-68dd-4d64-bd6f-156fe6d1dc6d/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035795 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-k6txp\" (UniqueName: \"kubernetes.io/projected/66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8-kube-api-access-k6txp\") pod \"node-resolver-279cn\" (UID: \"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\") " pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035846 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035879 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c18a759f-5f28-4f90-866f-8f90476ba69c-proxy-tls\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035914 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xbrf\" (UniqueName: \"kubernetes.io/projected/c18a759f-5f28-4f90-866f-8f90476ba69c-kube-api-access-9xbrf\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035969 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8-hosts-file\") pod \"node-resolver-279cn\" (UID: \"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\") " pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.035997 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c18a759f-5f28-4f90-866f-8f90476ba69c-rootfs\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036020 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c18a759f-5f28-4f90-866f-8f90476ba69c-mcd-auth-proxy-config\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036045 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036110 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036131 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zgdk5\" 
(UniqueName: \"kubernetes.io/projected/31d8b7a1-420e-4252-a5b7-eebe8a111292-kube-api-access-zgdk5\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036146 4703 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036191 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036204 4703 reconciler_common.go:293] "Volume detached for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59-serviceca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036219 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4d4hj\" (UniqueName: \"kubernetes.io/projected/3ab1a177-2de0-46d9-b765-d0d0649bb42e-kube-api-access-4d4hj\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036232 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8tdtz\" (UniqueName: \"kubernetes.io/projected/09efc573-dbb6-4249-bd59-9b87aba8dd28-kube-api-access-8tdtz\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036244 4703 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036257 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e7e6199b-1264-4501-8953-767f51328d08-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036268 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/01ab3dd5-8196-46d0-ad33-122e2ca51def-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036298 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036307 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e7e6199b-1264-4501-8953-767f51328d08" path="/var/lib/kubelet/pods/e7e6199b-1264-4501-8953-767f51328d08/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036344 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/d75a4c96-2883-4a0b-bab2-0fab2b6c0b49-host-slash\") pod \"iptables-alerter-4ln5h\" (UID: \"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\") " pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036308 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hosts-file\" (UniqueName: \"kubernetes.io/host-path/66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8-hosts-file\") pod \"node-resolver-279cn\" (UID: \"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\") " pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:36 crc 
kubenswrapper[4703]: I0202 12:51:36.036311 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036429 4703 reconciler_common.go:293] "Volume detached for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/fda69060-fa79-4696-b1a6-7980f124bf7c-mcd-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036448 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lz9wn\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-kube-api-access-lz9wn\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036462 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/496e6271-fb68-4057-954e-a0d97a4afa3f-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036475 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036486 4703 reconciler_common.go:293] "Volume detached for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-default-certificate\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036500 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036510 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dbsvg\" (UniqueName: \"kubernetes.io/projected/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-kube-api-access-dbsvg\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036520 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036529 4703 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036539 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fcqwp\" (UniqueName: \"kubernetes.io/projected/5fe579f8-e8a6-4643-bce5-a661393c4dde-kube-api-access-fcqwp\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036547 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036557 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcphl\" (UniqueName: \"kubernetes.io/projected/7583ce53-e0fe-4a16-9e4d-50516596a136-kube-api-access-xcphl\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc 
kubenswrapper[4703]: I0202 12:51:36.036568 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pj782\" (UniqueName: \"kubernetes.io/projected/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-kube-api-access-pj782\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036486 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-etc-kube\" (UniqueName: \"kubernetes.io/host-path/37a5e44f-9a88-4405-be8a-b645485e7312-host-etc-kube\") pod \"network-operator-58b4c7f79c-55gtf\" (UID: \"37a5e44f-9a88-4405-be8a-b645485e7312\") " pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036578 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7539238d-5fe0-46ed-884e-1c3b566537ec-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036618 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036631 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kfwg7\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-kube-api-access-kfwg7\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036644 4703 reconciler_common.go:293] "Volume detached for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/a0128f3a-b052-44ed-a84e-c4c8aaf17c13-samples-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036657 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036670 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x7zkh\" (UniqueName: \"kubernetes.io/projected/6731426b-95fe-49ff-bb5f-40441049fde2-kube-api-access-x7zkh\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036683 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-7c4vf\" (UniqueName: \"kubernetes.io/projected/22c825df-677d-4ca6-82db-3454ed06e783-kube-api-access-7c4vf\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036721 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/e7e6199b-1264-4501-8953-767f51328d08-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036860 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036880 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036893 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-client\" (UniqueName: 
\"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-client\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036905 4703 reconciler_common.go:293] "Volume detached for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-node-bootstrap-token\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036916 4703 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036927 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/57a731c4-ef35-47a8-b875-bfb08a7f8011-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036938 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d4lsv\" (UniqueName: \"kubernetes.io/projected/25e176fe-21b4-4974-b1ed-c8b94f112a7f-kube-api-access-d4lsv\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036953 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-v47cf\" (UniqueName: \"kubernetes.io/projected/c03ee662-fb2f-4fc4-a2c1-af487c19d254-kube-api-access-v47cf\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036964 4703 reconciler_common.go:293] "Volume detached for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/efdd0498-1daa-4136-9a4a-3b948c2293fc-webhook-certs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036975 4703 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036986 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x4zgh\" (UniqueName: \"kubernetes.io/projected/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-kube-api-access-x4zgh\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.036996 4703 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/0b78653f-4ff9-4508-8672-245ed9b561e3-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037007 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1386a44e-36a2-460c-96d0-0359d2b6f0f5-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037020 4703 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037032 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2d4wz\" (UniqueName: \"kubernetes.io/projected/5441d097-087c-4d9a-baa8-b210afa90fc9-kube-api-access-2d4wz\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037046 4703 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: 
\"kubernetes.io/configmap/87cf06ed-a83f-41a7-828d-70653580a8cb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037058 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6ccd8\" (UniqueName: \"kubernetes.io/projected/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-kube-api-access-6ccd8\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037079 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b78653f-4ff9-4508-8672-245ed9b561e3-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037090 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qs4fp\" (UniqueName: \"kubernetes.io/projected/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-kube-api-access-qs4fp\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037102 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mnrrd\" (UniqueName: \"kubernetes.io/projected/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-kube-api-access-mnrrd\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037113 4703 reconciler_common.go:293] "Volume detached for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/25e176fe-21b4-4974-b1ed-c8b94f112a7f-signing-cabundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037124 4703 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037133 4703 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037141 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037149 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b11524ee-3fca-4b1b-9cdf-6da289fdbc7d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037157 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cfbct\" (UniqueName: \"kubernetes.io/projected/57a731c4-ef35-47a8-b875-bfb08a7f8011-kube-api-access-cfbct\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037165 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6402fda4-df10-493c-b4e5-d0569419652d-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037173 4703 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037184 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: 
\"kubernetes.io/projected/0b78653f-4ff9-4508-8672-245ed9b561e3-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037192 4703 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037199 4703 reconciler_common.go:293] "Volume detached for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-audit\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037208 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037217 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037228 4703 reconciler_common.go:293] "Volume detached for volume \"certs\" (UniqueName: \"kubernetes.io/secret/5fe579f8-e8a6-4643-bce5-a661393c4dde-certs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037238 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fqsjt\" (UniqueName: \"kubernetes.io/projected/efdd0498-1daa-4136-9a4a-3b948c2293fc-kube-api-access-fqsjt\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037250 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mg5zb\" (UniqueName: \"kubernetes.io/projected/6402fda4-df10-493c-b4e5-d0569419652d-kube-api-access-mg5zb\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037261 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tk88c\" (UniqueName: \"kubernetes.io/projected/7539238d-5fe0-46ed-884e-1c3b566537ec-kube-api-access-tk88c\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037307 4703 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/b6cd30de-2eeb-49a2-ab40-9167f4560ff5-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037320 4703 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/8f668bae-612b-4b75-9490-919e737c6a3b-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037334 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-249nr\" (UniqueName: \"kubernetes.io/projected/b6312bbd-5731-4ea0-a20f-81d5a57df44a-kube-api-access-249nr\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037346 4703 reconciler_common.go:293] "Volume detached for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/43509403-f426-496e-be36-56cef71462f5-console-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037358 4703 reconciler_common.go:293] "Volume detached for volume 
\"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037370 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037381 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/0b574797-001e-440a-8f4e-c0be86edad0f-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037392 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6509e943-70c6-444c-bc41-48a544e36fbd-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037403 4703 reconciler_common.go:293] "Volume detached for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-stats-auth\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037415 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9d4552c7-cd75-42dd-8880-30dd377c49a4-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037426 4703 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/925f1c65-6136-48ba-85aa-3a3b50560753-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037438 4703 reconciler_common.go:293] "Volume detached for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/0b574797-001e-440a-8f4e-c0be86edad0f-mcc-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037452 4703 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/8f668bae-612b-4b75-9490-919e737c6a3b-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037463 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/fda69060-fa79-4696-b1a6-7980f124bf7c-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037470 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="efdd0498-1daa-4136-9a4a-3b948c2293fc" path="/var/lib/kubelet/pods/efdd0498-1daa-4136-9a4a-3b948c2293fc/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037474 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7583ce53-e0fe-4a16-9e4d-50516596a136-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037616 4703 reconciler_common.go:293] "Volume detached for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/6731426b-95fe-49ff-bb5f-40441049fde2-control-plane-machine-set-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037635 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037654 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-s4n52\" (UniqueName: \"kubernetes.io/projected/925f1c65-6136-48ba-85aa-3a3b50560753-kube-api-access-s4n52\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037685 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037698 4703 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037709 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5441d097-087c-4d9a-baa8-b210afa90fc9-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037719 4703 reconciler_common.go:293] "Volume detached for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/925f1c65-6136-48ba-85aa-3a3b50560753-ovn-control-plane-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037730 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-etcd-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037740 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037764 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/8f668bae-612b-4b75-9490-919e737c6a3b-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037776 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w9rds\" (UniqueName: \"kubernetes.io/projected/20b0d48f-5fd6-431c-a545-e3c800c7b866-kube-api-access-w9rds\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037788 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pcxfs\" (UniqueName: \"kubernetes.io/projected/9d4552c7-cd75-42dd-8880-30dd377c49a4-kube-api-access-pcxfs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037797 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037806 4703 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/87cf06ed-a83f-41a7-828d-70653580a8cb-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037853 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qg5z5\" (UniqueName: 
\"kubernetes.io/projected/43509403-f426-496e-be36-56cef71462f5-kube-api-access-qg5z5\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037867 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/01ab3dd5-8196-46d0-ad33-122e2ca51def-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037880 4703 reconciler_common.go:293] "Volume detached for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-sysctl-allowlist\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037890 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037900 4703 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037927 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037937 4703 reconciler_common.go:293] "Volume detached for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/22c825df-677d-4ca6-82db-3454ed06e783-auth-proxy-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037947 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ngvvp\" (UniqueName: \"kubernetes.io/projected/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-kube-api-access-ngvvp\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037957 4703 reconciler_common.go:293] "Volume detached for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/a31745f5-9847-4afe-82a5-3161cc66ca93-metrics-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037966 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e7e6199b-1264-4501-8953-767f51328d08-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.037978 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bf2bz\" (UniqueName: \"kubernetes.io/projected/1d611f23-29be-4491-8495-bee1670e935f-kube-api-access-bf2bz\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038002 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jhbk2\" (UniqueName: \"kubernetes.io/projected/bd23aa5c-e532-4e53-bccf-e79f130c5ae8-kube-api-access-jhbk2\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038011 4703 reconciler_common.go:293] "Volume detached for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/7bb08738-c794-4ee8-9972-3a62ca171029-cni-binary-copy\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038021 4703 reconciler_common.go:293] "Volume detached for volume \"profile-collector-cert\" (UniqueName: 
\"kubernetes.io/secret/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9-profile-collector-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038030 4703 reconciler_common.go:293] "Volume detached for volume \"images\" (UniqueName: \"kubernetes.io/configmap/31d8b7a1-420e-4252-a5b7-eebe8a111292-images\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038039 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-2w9zh\" (UniqueName: \"kubernetes.io/projected/4bb40260-dbaa-4fb0-84df-5e680505d512-kube-api-access-2w9zh\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038050 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038078 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w7l8j\" (UniqueName: \"kubernetes.io/projected/01ab3dd5-8196-46d0-ad33-122e2ca51def-kube-api-access-w7l8j\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038088 4703 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038097 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rnphk\" (UniqueName: \"kubernetes.io/projected/bf126b07-da06-4140-9a57-dfd54fc6b486-kube-api-access-rnphk\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038106 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038115 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9d4552c7-cd75-42dd-8880-30dd377c49a4-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038124 4703 reconciler_common.go:293] "Volume detached for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/6402fda4-df10-493c-b4e5-d0569419652d-machine-api-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038135 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/496e6271-fb68-4057-954e-a0d97a4afa3f-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038158 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xcgwh\" (UniqueName: \"kubernetes.io/projected/fda69060-fa79-4696-b1a6-7980f124bf7c-kube-api-access-xcgwh\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038168 4703 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/c03ee662-fb2f-4fc4-a2c1-af487c19d254-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038177 4703 reconciler_common.go:293] "Volume detached for volume \"config\" 
(UniqueName: \"kubernetes.io/configmap/09efc573-dbb6-4249-bd59-9b87aba8dd28-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038193 4703 reconciler_common.go:293] "Volume detached for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038201 4703 reconciler_common.go:293] "Volume detached for volume \"cert\" (UniqueName: \"kubernetes.io/secret/20b0d48f-5fd6-431c-a545-e3c800c7b866-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038210 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038234 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038244 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nzwt7\" (UniqueName: \"kubernetes.io/projected/96b93a3a-6083-4aea-8eab-fe1aa8245ad9-kube-api-access-nzwt7\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038254 4703 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/8f668bae-612b-4b75-9490-919e737c6a3b-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038264 4703 reconciler_common.go:293] "Volume detached for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b-tmpfs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038323 4703 reconciler_common.go:293] "Volume detached for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-etcd-serving-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038335 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6g6sz\" (UniqueName: \"kubernetes.io/projected/6509e943-70c6-444c-bc41-48a544e36fbd-kube-api-access-6g6sz\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038344 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7539238d-5fe0-46ed-884e-1c3b566537ec-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038353 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lzf88\" (UniqueName: \"kubernetes.io/projected/0b574797-001e-440a-8f4e-c0be86edad0f-kube-api-access-lzf88\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038429 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1d611f23-29be-4491-8495-bee1670e935f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038440 4703 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: 
\"kubernetes.io/secret/6ea678ab-3438-413e-bfe3-290ae7725660-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038450 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5441d097-087c-4d9a-baa8-b210afa90fc9-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038459 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/7583ce53-e0fe-4a16-9e4d-50516596a136-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038469 4703 reconciler_common.go:293] "Volume detached for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/4bb40260-dbaa-4fb0-84df-5e680505d512-multus-daemon-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038496 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038506 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/31d8b7a1-420e-4252-a5b7-eebe8a111292-proxy-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038515 4703 reconciler_common.go:293] "Volume detached for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/c03ee662-fb2f-4fc4-a2c1-af487c19d254-metrics-certs\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038526 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038535 4703 reconciler_common.go:293] "Volume detached for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/1bf7eb37-55a3-4c65-b768-a94c82151e69-encryption-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038546 4703 reconciler_common.go:293] "Volume detached for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/3ab1a177-2de0-46d9-b765-d0d0649bb42e-package-server-manager-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038571 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xfj7\" (UniqueName: \"kubernetes.io/projected/5225d0e4-402f-4861-b410-819f433b1803-kube-api-access-9xfj7\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038583 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/496e6271-fb68-4057-954e-a0d97a4afa3f-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038591 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038601 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: 
\"kubernetes.io/secret/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038611 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/1386a44e-36a2-460c-96d0-0359d2b6f0f5-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038620 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038630 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zkvpv\" (UniqueName: \"kubernetes.io/projected/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-kube-api-access-zkvpv\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038637 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f88749ec-7931-4ee7-b3fc-1ec5e11f92e9" path="/var/lib/kubelet/pods/f88749ec-7931-4ee7-b3fc-1ec5e11f92e9/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038653 4703 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/09ae3b1a-e8e7-4524-b54b-61eab6f9239a-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038669 4703 reconciler_common.go:293] "Volume detached for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/bc5039c0-ea34-426b-a2b7-fbbc87b49a6d-available-featuregates\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038678 4703 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/a31745f5-9847-4afe-82a5-3161cc66ca93-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038689 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/09efc573-dbb6-4249-bd59-9b87aba8dd28-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038698 4703 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038706 4703 reconciler_common.go:293] "Volume detached for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/1bf7eb37-55a3-4c65-b768-a94c82151e69-image-import-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038729 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038738 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-w4xd4\" (UniqueName: \"kubernetes.io/projected/8cea82b4-6893-4ddc-af9f-1bb5ae425c5b-kube-api-access-w4xd4\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038748 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/bf126b07-da06-4140-9a57-dfd54fc6b486-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038756 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pjr6v\" (UniqueName: \"kubernetes.io/projected/49ef4625-1d3a-4a9f-b595-c2433d32326d-kube-api-access-pjr6v\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038765 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/a31745f5-9847-4afe-82a5-3161cc66ca93-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038773 4703 reconciler_common.go:293] "Volume detached for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-console-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038782 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sb6h7\" (UniqueName: \"kubernetes.io/projected/1bf7eb37-55a3-4c65-b768-a94c82151e69-kube-api-access-sb6h7\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038806 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/49c341d1-5089-4bc2-86a0-a5e165cfcc6b-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038815 4703 reconciler_common.go:293] "Volume detached for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/b6312bbd-5731-4ea0-a20f-81d5a57df44a-srv-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038823 4703 reconciler_common.go:293] "Volume detached for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/6509e943-70c6-444c-bc41-48a544e36fbd-service-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038831 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vt5rc\" (UniqueName: \"kubernetes.io/projected/44663579-783b-4372-86d6-acf235a62d72-kube-api-access-vt5rc\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038864 4703 reconciler_common.go:293] "Volume detached for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/bf126b07-da06-4140-9a57-dfd54fc6b486-image-registry-operator-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038893 4703 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/6ea678ab-3438-413e-bfe3-290ae7725660-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038939 4703 reconciler_common.go:293] "Volume detached for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-oauth-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.038974 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5225d0e4-402f-4861-b410-819f433b1803-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.039019 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-htfz6\" (UniqueName: 
\"kubernetes.io/projected/6ea678ab-3438-413e-bfe3-290ae7725660-kube-api-access-htfz6\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.039052 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d6qdx\" (UniqueName: \"kubernetes.io/projected/87cf06ed-a83f-41a7-828d-70653580a8cb-kube-api-access-d6qdx\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.039063 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/1386a44e-36a2-460c-96d0-0359d2b6f0f5-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.039073 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/43509403-f426-496e-be36-56cef71462f5-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.039218 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fda69060-fa79-4696-b1a6-7980f124bf7c" path="/var/lib/kubelet/pods/fda69060-fa79-4696-b1a6-7980f124bf7c/volumes" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.040294 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/kube-controller-manager-crc"] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.041612 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.051705 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.052012 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/0.log" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.058505 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca" exitCode=255 Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.059124 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerDied","Data":"7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca"} Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.059174 4703 scope.go:117] "RemoveContainer" containerID="c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.064204 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k6txp\" (UniqueName: \"kubernetes.io/projected/66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8-kube-api-access-k6txp\") pod \"node-resolver-279cn\" (UID: \"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\") " pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.067696 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.069442 4703 kubelet.go:1929] "Failed creating a mirror pod for" err="pods \"kube-controller-manager-crc\" already exists" pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.098531 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.119505 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.139836 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.140642 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c18a759f-5f28-4f90-866f-8f90476ba69c-rootfs\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.140675 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c18a759f-5f28-4f90-866f-8f90476ba69c-mcd-auth-proxy-config\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.140697 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c18a759f-5f28-4f90-866f-8f90476ba69c-proxy-tls\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.140725 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9xbrf\" (UniqueName: \"kubernetes.io/projected/c18a759f-5f28-4f90-866f-8f90476ba69c-kube-api-access-9xbrf\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.140755 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rootfs\" (UniqueName: \"kubernetes.io/host-path/c18a759f-5f28-4f90-866f-8f90476ba69c-rootfs\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.141601 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcd-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/c18a759f-5f28-4f90-866f-8f90476ba69c-mcd-auth-proxy-config\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.142022 4703 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.142622 4703 scope.go:117] "RemoveContainer" containerID="7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.142902 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.144567 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/c18a759f-5f28-4f90-866f-8f90476ba69c-proxy-tls\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.148015 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": 
failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.157413 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xbrf\" (UniqueName: \"kubernetes.io/projected/c18a759f-5f28-4f90-866f-8f90476ba69c-kube-api-access-9xbrf\") pod \"machine-config-daemon-2vnzs\" (UID: \"c18a759f-5f28-4f90-866f-8f90476ba69c\") " pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.158322 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.163234 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.170424 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-operator/iptables-alerter-4ln5h" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.171315 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.179430 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-node-identity/network-node-identity-vrzqb" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.183001 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: W0202 12:51:36.183472 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod37a5e44f_9a88_4405_be8a_b645485e7312.slice/crio-ffff416c4a4a7b73a72a471ad5a08257c32d78f583249231fe45aa2e8c403862 WatchSource:0}: Error finding container ffff416c4a4a7b73a72a471ad5a08257c32d78f583249231fe45aa2e8c403862: Status 404 returned error can't find the container with id ffff416c4a4a7b73a72a471ad5a08257c32d78f583249231fe45aa2e8c403862 Feb 02 12:51:36 crc kubenswrapper[4703]: W0202 12:51:36.184069 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd75a4c96_2883_4a0b_bab2_0fab2b6c0b49.slice/crio-a07d4a551c0d47a49e7e35ee6b9c841802441c66d72e2bf3687440950a79a37e WatchSource:0}: Error finding container a07d4a551c0d47a49e7e35ee6b9c841802441c66d72e2bf3687440950a79a37e: Status 404 returned error can't find 
the container with id a07d4a551c0d47a49e7e35ee6b9c841802441c66d72e2bf3687440950a79a37e Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.185070 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/node-resolver-279cn" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.197612 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.214190 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:19Z\\\",\\\"message\\\":\\\"W0202 12:51:18.393884 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 12:51:18.394251 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770036678 cert, and key in /tmp/serving-cert-1094635916/serving-signer.crt, /tmp/serving-cert-1094635916/serving-signer.key\\\\nI0202 12:51:18.708069 1 observer_polling.go:159] Starting file observer\\\\nW0202 12:51:18.711453 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 12:51:18.711666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:18.726410 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1094635916/tls.crt::/tmp/serving-cert-1094635916/tls.key\\\\\\\"\\\\nF0202 12:51:18.960132 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] 
Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.228679 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.232190 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.239945 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.250688 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.268594 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.279596 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.293317 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-8vjml"] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.293591 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-additional-cni-plugins-nxxh5"] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.293826 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.294493 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.299824 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.299986 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.300133 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.300255 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.300449 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.301020 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.301137 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.310088 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.324908 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.342978 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.377815 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\
"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.394883 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:19Z\\\",\\\"message\\\":\\\"W0202 12:51:18.393884 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
12:51:18.394251 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770036678 cert, and key in /tmp/serving-cert-1094635916/serving-signer.crt, /tmp/serving-cert-1094635916/serving-signer.key\\\\nI0202 12:51:18.708069 1 observer_polling.go:159] Starting file observer\\\\nW0202 12:51:18.711453 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 12:51:18.711666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:18.726410 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1094635916/tls.crt::/tmp/serving-cert-1094635916/tls.key\\\\\\\"\\\\nF0202 12:51:18.960132 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.424464 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444501 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-etc-kubernetes\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444544 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-os-release\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444567 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444587 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-cni-bin\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444608 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cni-binary-copy\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " 
pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444627 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-cnibin\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444645 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-conf-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444663 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-kubelet\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444694 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hv5xw\" (UniqueName: \"kubernetes.io/projected/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-kube-api-access-hv5xw\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444713 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-hostroot\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444737 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-system-cni-dir\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444759 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-socket-dir-parent\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444777 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4f477\" (UniqueName: \"kubernetes.io/projected/5fe22056-9a8b-4eba-8776-c50531078e2f-kube-api-access-4f477\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444813 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-tuning-conf-dir\") pod 
\"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444840 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-netns\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444871 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-k8s-cni-cncf-io\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444899 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-cni-multus\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.444965 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-cni-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.445008 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-system-cni-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.445032 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-multus-certs\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.445050 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-daemon-config\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.445141 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cnibin\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.445181 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"os-release\" (UniqueName: 
\"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-os-release\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.445209 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5fe22056-9a8b-4eba-8776-c50531078e2f-cni-binary-copy\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.473347 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Certificate expiration is 2027-02-02 12:46:35 +0000 UTC, rotation deadline is 2026-11-25 12:19:47.755588931 +0000 UTC Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.473417 4703 certificate_manager.go:356] kubernetes.io/kube-apiserver-client-kubelet: Waiting 7103h28m11.282174154s for next certificate rotation Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.488722 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.506490 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546084 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546124 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546356 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-daemon-config\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546377 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cnibin\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546394 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-os-release\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546415 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5fe22056-9a8b-4eba-8776-c50531078e2f-cni-binary-copy\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546432 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-etc-kubernetes\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546450 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546466 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-cni-bin\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546480 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-os-release\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546496 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cni-binary-copy\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546511 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-cnibin\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546524 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-conf-dir\" (UniqueName: 
\"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-conf-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546539 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-kubelet\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546529 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cnibin\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546559 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546577 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hv5xw\" (UniqueName: \"kubernetes.io/projected/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-kube-api-access-hv5xw\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546591 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-hostroot\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546607 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-system-cni-dir\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546622 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-socket-dir-parent\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546651 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4f477\" (UniqueName: \"kubernetes.io/projected/5fe22056-9a8b-4eba-8776-c50531078e2f-kube-api-access-4f477\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546669 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: 
\"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546685 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546700 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-netns\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546721 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-cni-multus\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546857 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-os-release\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.546921 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"os-release\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-os-release\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547103 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-k8s-cni-cncf-io\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547135 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547163 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547186 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"multus-cni-dir\" 
(UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-cni-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547190 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-bin\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-cni-bin\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547243 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-kubernetes\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-etc-kubernetes\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547298 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-k8s-cni-cncf-io\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-k8s-cni-cncf-io\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547407 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547428 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547440 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547473 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-system-cni-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547520 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-system-cni-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547558 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547624 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547636 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object 
"openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547645 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547756 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:51:37.54740539 +0000 UTC m=+24.562612924 (durationBeforeRetry 1s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547809 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-netns\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547893 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-conf-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-conf-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547990 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-kubelet\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-kubelet\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547869 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-cni-dir\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-cni-dir\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.547845 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:37.547833804 +0000 UTC m=+24.563041338 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547993 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-multus\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-var-lib-cni-multus\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.548090 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-multus-certs\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.548089 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"hostroot\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-hostroot\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.548149 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-multus-certs\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-host-run-multus-certs\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.547840 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cnibin\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-cnibin\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.548144 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.548174 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:37.548124943 +0000 UTC m=+24.563332477 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.548247 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-socket-dir-parent\" (UniqueName: \"kubernetes.io/host-path/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-socket-dir-parent\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml" Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.548282 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:37.548248827 +0000 UTC m=+24.563456361 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.548389 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:37.54836209 +0000 UTC m=+24.563569624 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.548577 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"system-cni-dir\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-system-cni-dir\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.563357 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tuning-conf-dir\" (UniqueName: \"kubernetes.io/host-path/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-tuning-conf-dir\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.564947 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.577909 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.590020 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.601090 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.612601 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.691972 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j8d97"] Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.693286 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.695378 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.695594 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.695908 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.696230 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.696362 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.696461 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.696625 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.712235 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.723173 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.734642 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.743648 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.756847 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.766799 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.784163 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.795880 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.810681 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c5e32d6d432b3434fb4da6ad31f1ab0edab6d2198a313427514083eab3622592\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:19Z\\\",\\\"message\\\":\\\"W0202 12:51:18.393884 1 cmd.go:257] Using insecure, self-signed certificates\\\\nI0202 
12:51:18.394251 1 crypto.go:601] Generating new CA for check-endpoints-signer@1770036678 cert, and key in /tmp/serving-cert-1094635916/serving-signer.crt, /tmp/serving-cert-1094635916/serving-signer.key\\\\nI0202 12:51:18.708069 1 observer_polling.go:159] Starting file observer\\\\nW0202 12:51:18.711453 1 builder.go:272] unable to get owner reference (falling back to namespace): Get \\\\\\\"https://localhost:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\nI0202 12:51:18.711666 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:18.726410 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-1094635916/tls.crt::/tmp/serving-cert-1094635916/tls.key\\\\\\\"\\\\nF0202 12:51:18.960132 1 cmd.go:182] error initializing delegating authentication: unable to load configmap based request-header-client-ca-file: Get \\\\\\\"https://localhost:6443/api/v1/namespaces/kube-system/configmaps/extension-apiserver-authentication\\\\\\\": dial tcp [::1]:6443: connect: connection refused\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.820447 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.828889 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.842976 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"na
me\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851455 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sg6fs\" (UniqueName: \"kubernetes.io/projected/40400eeb-f9bd-4816-b65f-a25b0c3d021e-kube-api-access-sg6fs\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851490 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-var-lib-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851524 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-netd\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851540 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851607 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-node-log\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851631 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-log-socket\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851671 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-ovn\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851704 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-slash\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851719 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-bin\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851739 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-script-lib\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" 
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851754 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-kubelet\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851767 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-config\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851798 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovn-node-metrics-cert\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851821 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-systemd\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851838 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-etc-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851868 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851887 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-env-overrides\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851912 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-ovn-kubernetes\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851927 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-systemd-units\") pod \"ovnkube-node-j8d97\" (UID: 
\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.851942 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-netns\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.857977 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.864033 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 20:26:51.781050416 +0000 UTC Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.933457 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.933468 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.933583 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:51:36 crc kubenswrapper[4703]: E0202 12:51:36.933708 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.949029 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"multus-daemon-config\" (UniqueName: \"kubernetes.io/configmap/5fe22056-9a8b-4eba-8776-c50531078e2f-multus-daemon-config\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.949077 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/5fe22056-9a8b-4eba-8776-c50531078e2f-cni-binary-copy\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.952830 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4f477\" (UniqueName: \"kubernetes.io/projected/5fe22056-9a8b-4eba-8776-c50531078e2f-kube-api-access-4f477\") pod \"multus-8vjml\" (UID: \"5fe22056-9a8b-4eba-8776-c50531078e2f\") " pod="openshift-multus/multus-8vjml"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.952936 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-netd\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.952969 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.952995 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-node-log\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953017 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-log-socket\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953031 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-netd\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953038 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-ovn\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953068 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-ovn\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953085 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-slash\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953084 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953115 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-bin\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953124 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-log-socket\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953103 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-node-log\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953143 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-bin\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953159 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-kubelet\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953175 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-slash\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953183 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-config\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953207 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-script-lib\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953241 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-kubelet\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953261 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovn-node-metrics-cert\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953310 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-systemd\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953331 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-etc-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953369 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953390 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-env-overrides\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953412 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-systemd-units\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953431 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-netns\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953455 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-ovn-kubernetes\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953477 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sg6fs\" (UniqueName: \"kubernetes.io/projected/40400eeb-f9bd-4816-b65f-a25b0c3d021e-kube-api-access-sg6fs\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953517 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-var-lib-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953578 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-var-lib-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953614 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-systemd-units\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953646 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-netns\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953674 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-systemd\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953706 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-ovn-kubernetes\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953763 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-etc-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953771 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-openvswitch\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.953847 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-config\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.954222 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-script-lib\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.954519 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-env-overrides\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.957258 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovn-node-metrics-cert\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:36 crc kubenswrapper[4703]: I0202 12:51:36.971583 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sg6fs\" (UniqueName: \"kubernetes.io/projected/40400eeb-f9bd-4816-b65f-a25b0c3d021e-kube-api-access-sg6fs\") pod \"ovnkube-node-j8d97\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.004717 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.006411 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-binary-copy\" (UniqueName: \"kubernetes.io/configmap/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cni-binary-copy\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.006874 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cni-sysctl-allowlist\" (UniqueName: \"kubernetes.io/configmap/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-cni-sysctl-allowlist\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.010069 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hv5xw\" (UniqueName: \"kubernetes.io/projected/f5d4c5d7-4a35-465b-aed5-64f5cfe37533-kube-api-access-hv5xw\") pod \"multus-additional-cni-plugins-nxxh5\" (UID: \"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\") " pod="openshift-multus/multus-additional-cni-plugins-nxxh5"
Feb 02 12:51:37 crc kubenswrapper[4703]: W0202 12:51:37.019260 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod40400eeb_f9bd_4816_b65f_a25b0c3d021e.slice/crio-ef67a9b05fafb34ba54381820a283ba99313f3c93c90327c3276f4355ddb87ef WatchSource:0}: Error finding container ef67a9b05fafb34ba54381820a283ba99313f3c93c90327c3276f4355ddb87ef: Status 404 returned error can't find the container with id ef67a9b05fafb34ba54381820a283ba99313f3c93c90327c3276f4355ddb87ef
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.086204 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.086242 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" event={"ID":"37a5e44f-9a88-4405-be8a-b645485e7312","Type":"ContainerStarted","Data":"ffff416c4a4a7b73a72a471ad5a08257c32d78f583249231fe45aa2e8c403862"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.088972 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"ef67a9b05fafb34ba54381820a283ba99313f3c93c90327c3276f4355ddb87ef"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.089735 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-279cn" event={"ID":"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8","Type":"ContainerStarted","Data":"35ad1856d362401ca437cdd603b6980c234a7f767dc92602a7c33210b5bb8ee7"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.091191 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.093064 4703 scope.go:117] "RemoveContainer" containerID="7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca"
Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.093198 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.094394 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.094422 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"d03c6fadfeaed6c34265a623502b805fd757baf2e5688fdfae3d77002ce87746"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.095392 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.095414 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"175973a779a027af95419d1197073e07b21ced591879c8d6367d9ec323625af7"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.097960 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"a07d4a551c0d47a49e7e35ee6b9c841802441c66d72e2bf3687440950a79a37e"}
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.110584 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.118925 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.128925 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.140065 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.153741 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.163280 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.173711 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.190367 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.224607 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-8vjml"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.231206 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.232215 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-additional-cni-plugins-nxxh5"
Feb 02 12:51:37 crc kubenswrapper[4703]: W0202 12:51:37.243772 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fe22056_9a8b_4eba_8776_c50531078e2f.slice/crio-c52394669462a5efb602949fdfb8d9b9c8f0dfcfcb70b8b661559dcea6df73bc WatchSource:0}: Error finding container c52394669462a5efb602949fdfb8d9b9c8f0dfcfcb70b8b661559dcea6df73bc: Status 404 returned error can't find the container with id c52394669462a5efb602949fdfb8d9b9c8f0dfcfcb70b8b661559dcea6df73bc
Feb 02 12:51:37 crc kubenswrapper[4703]: W0202 12:51:37.244555 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5d4c5d7_4a35_465b_aed5_64f5cfe37533.slice/crio-e36b670b66bc28a9e48eee045f6c640662cf7eba88d13a4fe66a5d1e3792879b WatchSource:0}: Error finding container e36b670b66bc28a9e48eee045f6c640662cf7eba88d13a4fe66a5d1e3792879b: Status 404 returned error can't find the container with id e36b670b66bc28a9e48eee045f6c640662cf7eba88d13a4fe66a5d1e3792879b
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.273086 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.312708 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused"
Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.350655 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.392795 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [kubecfg-setup]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",
\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\
"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"19
2.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.559093 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.559188 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.559216 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.559237 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559262 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:51:39.559235305 +0000 UTC m=+26.574442889 (durationBeforeRetry 2s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559313 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.559319 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559361 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:39.559348209 +0000 UTC m=+26.574555733 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559492 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559508 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559519 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559551 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:39.559542715 +0000 UTC m=+26.574750249 (durationBeforeRetry 2s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559605 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559618 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559627 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559651 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:39.559643538 +0000 UTC m=+26.574851172 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559653 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.559681 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:39.559674379 +0000 UTC m=+26.574882023 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.864347 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 18:13:33.740885252 +0000 UTC Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.933728 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:37 crc kubenswrapper[4703]: E0202 12:51:37.933842 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.937197 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01ab3dd5-8196-46d0-ad33-122e2ca51def" path="/var/lib/kubelet/pods/01ab3dd5-8196-46d0-ad33-122e2ca51def/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.959526 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09ae3b1a-e8e7-4524-b54b-61eab6f9239a" path="/var/lib/kubelet/pods/09ae3b1a-e8e7-4524-b54b-61eab6f9239a/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.960463 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="09efc573-dbb6-4249-bd59-9b87aba8dd28" path="/var/lib/kubelet/pods/09efc573-dbb6-4249-bd59-9b87aba8dd28/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.961324 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b574797-001e-440a-8f4e-c0be86edad0f" path="/var/lib/kubelet/pods/0b574797-001e-440a-8f4e-c0be86edad0f/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.961964 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b78653f-4ff9-4508-8672-245ed9b561e3" path="/var/lib/kubelet/pods/0b78653f-4ff9-4508-8672-245ed9b561e3/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.962568 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1386a44e-36a2-460c-96d0-0359d2b6f0f5" path="/var/lib/kubelet/pods/1386a44e-36a2-460c-96d0-0359d2b6f0f5/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.963227 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1bf7eb37-55a3-4c65-b768-a94c82151e69" path="/var/lib/kubelet/pods/1bf7eb37-55a3-4c65-b768-a94c82151e69/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.963990 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1d611f23-29be-4491-8495-bee1670e935f" path="/var/lib/kubelet/pods/1d611f23-29be-4491-8495-bee1670e935f/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.964710 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="20b0d48f-5fd6-431c-a545-e3c800c7b866" path="/var/lib/kubelet/pods/20b0d48f-5fd6-431c-a545-e3c800c7b866/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.965238 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c" path="/var/lib/kubelet/pods/210d8245-ebfc-4e3b-ac4a-e21ce76f9a7c/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.965903 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="22c825df-677d-4ca6-82db-3454ed06e783" path="/var/lib/kubelet/pods/22c825df-677d-4ca6-82db-3454ed06e783/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.966657 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="25e176fe-21b4-4974-b1ed-c8b94f112a7f" 
path="/var/lib/kubelet/pods/25e176fe-21b4-4974-b1ed-c8b94f112a7f/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.967235 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="308be0ea-9f5f-4b29-aeb1-5abd31a0b17b" path="/var/lib/kubelet/pods/308be0ea-9f5f-4b29-aeb1-5abd31a0b17b/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.967856 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="31d8b7a1-420e-4252-a5b7-eebe8a111292" path="/var/lib/kubelet/pods/31d8b7a1-420e-4252-a5b7-eebe8a111292/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.968504 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3ab1a177-2de0-46d9-b765-d0d0649bb42e" path="/var/lib/kubelet/pods/3ab1a177-2de0-46d9-b765-d0d0649bb42e/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.969036 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3cb93b32-e0ae-4377-b9c8-fdb9842c6d59" path="/var/lib/kubelet/pods/3cb93b32-e0ae-4377-b9c8-fdb9842c6d59/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.969761 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="43509403-f426-496e-be36-56cef71462f5" path="/var/lib/kubelet/pods/43509403-f426-496e-be36-56cef71462f5/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.970261 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="44663579-783b-4372-86d6-acf235a62d72" path="/var/lib/kubelet/pods/44663579-783b-4372-86d6-acf235a62d72/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.971003 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="496e6271-fb68-4057-954e-a0d97a4afa3f" path="/var/lib/kubelet/pods/496e6271-fb68-4057-954e-a0d97a4afa3f/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.972536 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49c341d1-5089-4bc2-86a0-a5e165cfcc6b" path="/var/lib/kubelet/pods/49c341d1-5089-4bc2-86a0-a5e165cfcc6b/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.973132 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="49ef4625-1d3a-4a9f-b595-c2433d32326d" path="/var/lib/kubelet/pods/49ef4625-1d3a-4a9f-b595-c2433d32326d/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.973893 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4bb40260-dbaa-4fb0-84df-5e680505d512" path="/var/lib/kubelet/pods/4bb40260-dbaa-4fb0-84df-5e680505d512/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.974465 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5225d0e4-402f-4861-b410-819f433b1803" path="/var/lib/kubelet/pods/5225d0e4-402f-4861-b410-819f433b1803/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.976027 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5441d097-087c-4d9a-baa8-b210afa90fc9" path="/var/lib/kubelet/pods/5441d097-087c-4d9a-baa8-b210afa90fc9/volumes" Feb 02 12:51:37 crc kubenswrapper[4703]: I0202 12:51:37.976547 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="57a731c4-ef35-47a8-b875-bfb08a7f8011" path="/var/lib/kubelet/pods/57a731c4-ef35-47a8-b875-bfb08a7f8011/volumes" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.101390 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" 
containerID="ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737" exitCode=0 Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.101488 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.103425 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.104878 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" event={"ID":"ef543e1b-8068-4ea3-b32a-61027b32e95d","Type":"ContainerStarted","Data":"9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.106034 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/node-resolver-279cn" event={"ID":"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8","Type":"ContainerStarted","Data":"db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.107208 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerStarted","Data":"a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.107239 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerStarted","Data":"e36b670b66bc28a9e48eee045f6c640662cf7eba88d13a4fe66a5d1e3792879b"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.108928 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerStarted","Data":"eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.108991 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerStarted","Data":"c52394669462a5efb602949fdfb8d9b9c8f0dfcfcb70b8b661559dcea6df73bc"} Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.124513 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.138309 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.154510 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.166195 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.175498 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.188474 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [machine-config-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.201701 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.213103 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin 
routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"last
State\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 
127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.229204 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"starte
d\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"re
adOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111
a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.239766 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-operator]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":false,\\\"restartCount\\\":5,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.248084 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [dns-node-resolver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: 
connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.258419 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.273376 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [webhook approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [webhook 
approver]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":false,\\\"restartCount\\\":6,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.287795 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.297336 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.306805 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-confi
g-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.318354 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"
name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.329513 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-
dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.354825 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with 
unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\
\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.369562 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.387860 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.412197 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.425673 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": dial tcp 127.0.0.1:9743: connect: connection refused" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.437482 4703 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.449578 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.460704 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.865145 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-12 05:29:04.14706 +0000 UTC Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.876285 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/node-ca-qqhwl"] Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.876641 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.878512 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.879227 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.879243 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.879322 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.896619 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\
\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.912369 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.927126 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.933300 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.933423 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:38 crc kubenswrapper[4703]: E0202 12:51:38.933524 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:38 crc kubenswrapper[4703]: E0202 12:51:38.933626 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.939003 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.950458 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.963551 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.974001 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5deea29f-df1d-4530-b29b-f50e5f40edeb-host\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.974052 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d96wm\" (UniqueName: \"kubernetes.io/projected/5deea29f-df1d-4530-b29b-f50e5f40edeb-kube-api-access-d96wm\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.974125 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5deea29f-df1d-4530-b29b-f50e5f40edeb-serviceca\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:38 crc kubenswrapper[4703]: I0202 12:51:38.977261 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [egress-router-binary-copy cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"}
,{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.000529 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:38Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.012365 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.021016 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.032802 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.044165 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.053589 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.070923 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.075807 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d96wm\" (UniqueName: \"kubernetes.io/projected/5deea29f-df1d-4530-b29b-f50e5f40edeb-kube-api-access-d96wm\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.075860 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5deea29f-df1d-4530-b29b-f50e5f40edeb-serviceca\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.075890 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5deea29f-df1d-4530-b29b-f50e5f40edeb-host\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.075943 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host\" (UniqueName: \"kubernetes.io/host-path/5deea29f-df1d-4530-b29b-f50e5f40edeb-host\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.076777 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serviceca\" (UniqueName: \"kubernetes.io/configmap/5deea29f-df1d-4530-b29b-f50e5f40edeb-serviceca\") pod \"node-ca-qqhwl\" (UID: \"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.099014 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d96wm\" (UniqueName: \"kubernetes.io/projected/5deea29f-df1d-4530-b29b-f50e5f40edeb-kube-api-access-d96wm\") pod \"node-ca-qqhwl\" (UID: 
\"5deea29f-df1d-4530-b29b-f50e5f40edeb\") " pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.114241 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5d4c5d7-4a35-465b-aed5-64f5cfe37533" containerID="a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3" exitCode=0 Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.114355 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerDied","Data":"a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.115842 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" event={"ID":"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49","Type":"ContainerStarted","Data":"0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.119838 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.119889 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.119905 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.119918 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.119931 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.119944 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4"} Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.139944 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [iptables-alerter]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.174691 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.188350 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/node-ca-qqhwl" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.213180 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: W0202 12:51:39.216798 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5deea29f_df1d_4530_b29b_f50e5f40edeb.slice/crio-4624c92b1a3629e2af4e084a433e4dc6131cbdc03f338ab88d0056bdef731a2a WatchSource:0}: Error finding container 4624c92b1a3629e2af4e084a433e4dc6131cbdc03f338ab88d0056bdef731a2a: Status 404 returned error can't find the container with id 4624c92b1a3629e2af4e084a433e4dc6131cbdc03f338ab88d0056bdef731a2a Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.253495 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.290087 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.333955 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"rea
dOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.371916 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"na
me\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.411394 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.453753 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.490506 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.513980 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.514688 4703 scope.go:117] "RemoveContainer" containerID="7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca" Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.514904 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-apiserver-check-endpoints\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\"" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.531868 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.572579 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reaso
n\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc 
kubenswrapper[4703]: I0202 12:51:39.582239 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.582368 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582401 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:51:43.582373839 +0000 UTC m=+30.597581403 (durationBeforeRetry 4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.582447 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.582490 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582509 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582527 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582540 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.582568 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582592 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:43.582574665 +0000 UTC m=+30.597782199 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582658 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582659 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582697 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582677 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582750 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582768 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:43.58274501 +0000 UTC m=+30.597952594 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582791 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:43.582780962 +0000 UTC m=+30.597988616 (durationBeforeRetry 4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.582811 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:43.582802492 +0000 UTC m=+30.598010136 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.614482 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z 
is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.653553 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.691945 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.735968 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.785293 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.818105 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.855938 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: [node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"message\\\":\\\"containers with unready status: 
[node-ca]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.865593 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 14:53:12.243255246 +0000 UTC Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.894436 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.931964 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.933056 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:39 crc kubenswrapper[4703]: E0202 12:51:39.933198 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:39 crc kubenswrapper[4703]: I0202 12:51:39.972797 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:39Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.011650 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.059079 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.093312 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.125002 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5d4c5d7-4a35-465b-aed5-64f5cfe37533" containerID="18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b" exitCode=0 Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.125099 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerDied","Data":"18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b"} Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.126809 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qqhwl" event={"ID":"5deea29f-df1d-4530-b29b-f50e5f40edeb","Type":"ContainerStarted","Data":"0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33"} Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.126841 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/node-ca-qqhwl" event={"ID":"5deea29f-df1d-4530-b29b-f50e5f40edeb","Type":"ContainerStarted","Data":"4624c92b1a3629e2af4e084a433e4dc6131cbdc03f338ab88d0056bdef731a2a"} Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.145305 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [cni-plugins bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c85
7df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay
.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.177132 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z 
is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.212803 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.250235 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.293671 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.333852 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.372001 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.412461 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.451167 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.493418 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [bond-cni-plugin routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-
02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.536742 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z 
is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.571727 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.610526 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.650571 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.689376 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.732485 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.770250 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:40Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.865893 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-11 08:51:29.531177009 +0000 UTC Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.933325 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:40 crc kubenswrapper[4703]: I0202 12:51:40.933386 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:40 crc kubenswrapper[4703]: E0202 12:51:40.933442 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:40 crc kubenswrapper[4703]: E0202 12:51:40.933519 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.133838 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5d4c5d7-4a35-465b-aed5-64f5cfe37533" containerID="833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744" exitCode=0 Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.133915 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerDied","Data":"833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.139588 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.147734 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.199737 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.211769 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.224982 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.239027 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.252119 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-acce
ss-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.265674 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\
"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.276915 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"re
ady\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.290791 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\
\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.303532 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.315223 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.322331 4703 kubelet_node_status.go:401] "Setting node annotation to enable volume controller attach/detach"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.324160 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.324190 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.324200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.324315 4703 kubelet_node_status.go:76] "Attempting to register node" node="crc"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.328313 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [routeoverride-cni whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/
cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.329771 4703 kubelet_node_status.go:115] "Node was previously registered" node="crc" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.330018 4703 kubelet_node_status.go:79] "Successfully registered node" node="crc" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.331123 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.331155 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.331164 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.331180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.331192 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.346267 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":fa
lse,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\
\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a1923079
05c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.351620 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.354411 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.354446 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.354457 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.354471 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.354482 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.365471 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.371508 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.371545 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.371554 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.371568 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.371578 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.372181 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.382735 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.386354 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.386381 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.386391 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.386404 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.386412 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.397189 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.400693 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.400739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.400752 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.400768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.400780 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.412096 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:41Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.412204 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.413827 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.413863 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.413874 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.413887 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.413897 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.519471 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.519766 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.519776 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.519859 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.519871 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.622395 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.622434 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.622446 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.622460 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.622470 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.724411 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.724450 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.724463 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.724478 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.724487 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.828073 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.828103 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.828112 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.828128 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.828138 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.866103 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 21:54:36.215936647 +0000 UTC Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.931767 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.931911 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.931939 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.932013 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.932052 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:41Z","lastTransitionTime":"2026-02-02T12:51:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:41 crc kubenswrapper[4703]: I0202 12:51:41.932973 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:41 crc kubenswrapper[4703]: E0202 12:51:41.933337 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.035175 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.035235 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.035245 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.035266 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.035310 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.137483 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.137521 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.137530 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.137544 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.137554 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.144553 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5d4c5d7-4a35-465b-aed5-64f5cfe37533" containerID="d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205" exitCode=0 Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.144593 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerDied","Data":"d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.162218 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.176104 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.188783 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.205682 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.218787 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.230908 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.242628 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.242660 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.242670 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.242682 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.242691 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.248509 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni-bincopy whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:
51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.266695 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.280961 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.295247 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.308167 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.320295 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.337594 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.344608 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.344926 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.344941 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.344957 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.344967 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.349056 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.447475 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.447517 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.447527 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.447540 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.447550 4703 setters.go:603] "Node became not 
ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.549571 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.549609 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.549617 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.549635 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.549650 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.652301 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.652342 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.652350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.652364 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.652373 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.754913 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.754965 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.754982 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.755003 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.755015 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.858928 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.858979 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.858996 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.859023 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.859049 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.866584 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-14 21:39:49.760502121 +0000 UTC Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.933326 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.933385 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:42 crc kubenswrapper[4703]: E0202 12:51:42.933444 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:42 crc kubenswrapper[4703]: E0202 12:51:42.933506 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.961834 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.961878 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.961889 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.961930 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:42 crc kubenswrapper[4703]: I0202 12:51:42.961945 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:42Z","lastTransitionTime":"2026-02-02T12:51:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.064704 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.064754 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.064766 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.064792 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.064808 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.153259 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5d4c5d7-4a35-465b-aed5-64f5cfe37533" containerID="543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f" exitCode=0 Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.153324 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerDied","Data":"543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.168050 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.168114 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.168126 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.168149 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.168164 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.171054 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.185557 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.204749 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.221025 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.237092 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.250747 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.264377 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.270358 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.270398 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.270407 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.270423 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.270432 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.280433 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.296250 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.318768 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb 
ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-a
pi-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"
},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z 
is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.335840 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.348577 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.365259 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.373693 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.373743 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.373756 4703 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.373781 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.373794 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.379389 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.476216 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 
crc kubenswrapper[4703]: I0202 12:51:43.476261 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.476298 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.476317 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.476328 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.579171 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.579219 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.579231 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.579256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.579268 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.624781 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.624984 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:51:51.624940049 +0000 UTC m=+38.640147593 (durationBeforeRetry 8s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.625131 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.625161 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.625185 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.625207 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625294 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625316 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625340 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625355 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625367 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 
12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625368 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:51.625356442 +0000 UTC m=+38.640563986 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625414 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:51.625400953 +0000 UTC m=+38.640608487 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625426 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:51.625419584 +0000 UTC m=+38.640627108 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625512 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625580 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625601 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.625697 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:51.625665432 +0000 UTC m=+38.640873136 (durationBeforeRetry 8s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.655950 4703 transport.go:147] "Certificate rotation detected, shutting down client connections to start using new credentials" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.682858 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.682903 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.682914 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.682933 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.682945 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.786501 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.786567 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.786580 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.786597 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.786610 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.867343 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-28 04:02:32.558954999 +0000 UTC Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.889396 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.889453 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.889464 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.889483 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.889497 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.933512 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:43 crc kubenswrapper[4703]: E0202 12:51:43.933628 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.947477 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.958845 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.972243 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.985486 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.991093 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.991127 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.991136 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.991150 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:43 crc kubenswrapper[4703]: I0202 12:51:43.991161 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:43Z","lastTransitionTime":"2026-02-02T12:51:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.000248 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.015183 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.030775 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.045109 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.058596 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.071164 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.083398 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.092873 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.092902 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.092914 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.092929 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.092939 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.097713 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5
xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.115473 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovn-controller ovn-acl-logging kube-rbac-proxy-node kube-rbac-proxy-ovn-metrics northd nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-op
envswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{
},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36
cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.127032 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 
2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.162981 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.163894 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.163928 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.163977 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.169463 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5d4c5d7-4a35-465b-aed5-64f5cfe37533" containerID="dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c" exitCode=0 Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.169545 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerDied","Data":"dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.189725 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.202074 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.202111 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.202120 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.202139 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.202149 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.213555 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.213617 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.218506 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.230365 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.242541 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.253799 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.266752 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.278945 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.293662 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.304182 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.304211 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.304219 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.304233 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.304243 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.312176 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with incomplete status: [whereabouts-cni]\\\",\\\"reason\\\":\\\"ContainersNotInitialized\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5
xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\
":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.336407 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [nbdb sbdb ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\
\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\
"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recurs
iveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.355557 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.369046 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.382506 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.392076 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.404997 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.406666 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.406692 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.406700 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.406733 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.406745 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.420414 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.431789 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.441963 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.452590 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.463796 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.480041 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90ed
aeede7103948c4e81afc9c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccoun
t\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.492748 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-multus-additional-cni-plugins]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"PodInitializing\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\"
:\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",
\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.504247 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.509672 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.509708 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.509748 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.509762 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.509772 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.515463 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.524790 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.515463 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.524790 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z"
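These status updates are strategic merge patches: the $setElementOrder/conditions directive pins the order of the conditions list (merged by its "type" key) while only the changed entries travel in the patch. A trimmed-down sketch of how such a patch applies, assuming the k8s.io/api and k8s.io/apimachinery modules are available (the values are simplified from the patches above):

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/strategicpatch"
)

func main() {
	// Original status as the API server knows it (simplified).
	original := []byte(`{"status":{"conditions":[{"type":"Ready","status":"False"}]}}`)
	// Patch in the same shape the kubelet sends above: element order plus
	// only the entries that changed, merged by the "type" key.
	patch := []byte(`{"status":{"$setElementOrder/conditions":[{"type":"Ready"}],` +
		`"conditions":[{"type":"Ready","status":"True","lastTransitionTime":"2026-02-02T12:51:38Z"}]}}`)

	// The Pod type supplies the merge keys and patch strategies.
	merged, err := strategicpatch.StrategicMergePatch(original, patch, &corev1.Pod{})
	if err != nil {
		fmt.Println("merge failed:", err)
		return
	}
	fmt.Println(string(merged))
}

None of these patches reach the API server here: pod writes are routed through the pod.network-node-identity.openshift.io admission webhook, and its expired certificate fails every call first.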
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.535389 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.547185 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.555660 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.612575 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.612609 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.612619 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.612632 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.612643 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.714458 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.714487 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.714495 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.714510 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.714519 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.817220 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.817263 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.817290 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.817309 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.817326 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.868146 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 17:06:38.670358253 +0000 UTC
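The kubelet-serving certificate, by contrast, is still valid (it expires 2026-02-24), but its rotation deadline of 2025-12-04 is already in the past, so the kubelet keeps re-evaluating rotation on each loop. Certificate managers in the client-go style pick that deadline at a random point late in the validity window; a sketch of the idea, assuming the commonly documented 70-90% jitter band and a one-year validity (neither is stated in the log):

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// jitteredRotationDeadline returns a rotation time at a random point in
// the last 10-30% of the certificate's validity window (assumed band).
func jitteredRotationDeadline(notBefore, notAfter time.Time) time.Time {
	validity := notAfter.Sub(notBefore)
	return notBefore.Add(time.Duration(float64(validity) * (0.7 + 0.2*rand.Float64())))
}

func main() {
	// Expiration taken from the log line above; the issuance time is assumed.
	notAfter, _ := time.Parse("2006-01-02 15:04:05 -0700 MST", "2026-02-24 05:53:03 +0000 UTC")
	notBefore := notAfter.AddDate(-1, 0, 0)
	fmt.Println("rotation deadline:", jitteredRotationDeadline(notBefore, notAfter))
}

An overdue deadline is harmless in itself; the rotated certificate simply replaces the old one once a new one can be obtained.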
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.920179 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.920223 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.920232 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.920246 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.920257 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:44Z","lastTransitionTime":"2026-02-02T12:51:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.933437 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:51:44 crc kubenswrapper[4703]: I0202 12:51:44.933454 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:51:44 crc kubenswrapper[4703]: E0202 12:51:44.933538 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:51:44 crc kubenswrapper[4703]: E0202 12:51:44.933597 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.022335 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.022383 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.022395 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.022414 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.022427 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.124321 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.124362 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.124376 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.124391 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.124402 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.176761 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" event={"ID":"f5d4c5d7-4a35-465b-aed5-64f5cfe37533","Type":"ContainerStarted","Data":"d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.189661 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha2
56:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.201166 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.212375 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.222973 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.226090 4703 kubelet_node_status.go:724] 
"Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.226118 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.226127 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.226139 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.226148 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.234375 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin
\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.244647 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.257019 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.278075 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"D
isabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.290998 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.302419 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.313241 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.322566 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.327700 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.327728 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.327737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.327752 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.327760 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.335236 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.344771 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:45Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.429475 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.429509 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.429519 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.429549 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.429560 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.531947 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.531999 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.532011 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.532029 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.532044 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.634243 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.634311 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.634323 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.634343 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.634361 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.737096 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.737139 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.737151 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.737167 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.737178 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.842654 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.842699 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.842708 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.842724 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.842735 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.868370 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 09:08:23.062595226 +0000 UTC Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.933365 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:45 crc kubenswrapper[4703]: E0202 12:51:45.933517 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.944927 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.944960 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.944968 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.944981 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:45 crc kubenswrapper[4703]: I0202 12:51:45.944990 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:45Z","lastTransitionTime":"2026-02-02T12:51:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.046878 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.046918 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.046929 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.046946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.046960 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.148838 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.148908 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.148925 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.148950 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.148967 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.252350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.252393 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.252402 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.252423 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.252436 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.360431 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.360488 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.360503 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.360533 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.360548 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.463000 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.463060 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.463073 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.463095 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.463108 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.565260 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.565303 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.565314 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.565326 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.565336 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.667950 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.668373 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.668473 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.668641 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.668713 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.771782 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.771806 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.771813 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.771826 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.771834 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.869430 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-05 23:33:17.862225458 +0000 UTC Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.874322 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.874376 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.874386 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.874402 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.874412 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.933446 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.933446 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:46 crc kubenswrapper[4703]: E0202 12:51:46.933562 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:46 crc kubenswrapper[4703]: E0202 12:51:46.933622 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.976618 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.976656 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.976664 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.976683 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:46 crc kubenswrapper[4703]: I0202 12:51:46.976692 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:46Z","lastTransitionTime":"2026-02-02T12:51:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.078588 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.078845 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.078919 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.078997 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.079083 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.181701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.181741 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.181756 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.181773 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.181785 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.286373 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.286411 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.286421 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.286435 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.286448 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.388051 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.388091 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.388100 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.388115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.388124 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.489915 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.489968 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.489978 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.489994 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.490011 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.593038 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.593104 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.593117 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.593200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.593248 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.695420 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.695458 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.695467 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.695481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.695490 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.798124 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.798177 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.798196 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.798223 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.798239 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.869645 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 03:37:37.316229495 +0000 UTC
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.900869 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.900934 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.900945 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.900961 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.900975 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:47Z","lastTransitionTime":"2026-02-02T12:51:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:47 crc kubenswrapper[4703]: I0202 12:51:47.933327 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:51:47 crc kubenswrapper[4703]: E0202 12:51:47.933505 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.003311 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.003378 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.003386 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.003399 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.003408 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.111010 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.111072 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.111087 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.111122 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.111191 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.202058 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/0.log"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.205246 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55" exitCode=1
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.205328 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55"}
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.207805 4703 scope.go:117] "RemoveContainer" containerID="79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.214602 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.214682 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.214698 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.214803 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.214847 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/.
Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.222838 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.239068 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.253672 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.269495 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.287056 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.305651 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/
kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 
'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.318168 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.318217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.318228 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: 
I0202 12:51:48.318244 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.318256 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.332614 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.349570 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.366132 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.382241 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.394580 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.413015 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33
488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.420731 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.420763 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.420772 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.420786 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.420796 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.435059 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.449657 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:48Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.523805 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.523853 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.523878 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.523896 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.523908 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.625996 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.626042 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.626054 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.626071 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.626084 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.729630 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.729685 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.729694 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.729714 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.729728 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.832198 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.832260 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.832285 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.832301 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.832312 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.870410 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-18 17:18:16.76725142 +0000 UTC Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.932867 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.932901 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:48 crc kubenswrapper[4703]: E0202 12:51:48.932990 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:48 crc kubenswrapper[4703]: E0202 12:51:48.933062 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.935084 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.935118 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.935130 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.935146 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:48 crc kubenswrapper[4703]: I0202 12:51:48.935160 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:48Z","lastTransitionTime":"2026-02-02T12:51:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.039766 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.039814 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.039829 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.039852 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.039869 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.142714 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.142770 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.142785 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.142819 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.142839 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.167868 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6"] Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.168348 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.170978 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.171182 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.182228 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.204990 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.213730 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/0.log"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.219092 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a"}
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.219641 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.220506 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.234316 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.246315 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.246382 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.246396 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.246412 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.246423 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.248387 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.273208 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.281398 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/051f5a57-faac-43af-9d4b-c83992dae9a2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.281486 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/051f5a57-faac-43af-9d4b-c83992dae9a2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.281516 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54fc7\" (UniqueName: \"kubernetes.io/projected/051f5a57-faac-43af-9d4b-c83992dae9a2-kube-api-access-54fc7\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.281559 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/051f5a57-faac-43af-9d4b-c83992dae9a2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.286903 4703 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 
12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.302233 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.
168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.315037 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\
"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.333193 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPat
h\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\
":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed 
calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.348691 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.348723 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.348734 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.348746 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.348755 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.355352 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90ed
aeede7103948c4e81afc9c55\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099
482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.379965 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call 
webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.382530 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/051f5a57-faac-43af-9d4b-c83992dae9a2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.382693 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/051f5a57-faac-43af-9d4b-c83992dae9a2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.382771 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54fc7\" (UniqueName: \"kubernetes.io/projected/051f5a57-faac-43af-9d4b-c83992dae9a2-kube-api-access-54fc7\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.382868 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/051f5a57-faac-43af-9d4b-c83992dae9a2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.383703 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/051f5a57-faac-43af-9d4b-c83992dae9a2-ovnkube-config\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.384025 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/051f5a57-faac-43af-9d4b-c83992dae9a2-env-overrides\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.391123 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-control-plane-metrics-cert\" (UniqueName: \"kubernetes.io/secret/051f5a57-faac-43af-9d4b-c83992dae9a2-ovn-control-plane-metrics-cert\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.395011 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.400700 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54fc7\" (UniqueName: \"kubernetes.io/projected/051f5a57-faac-43af-9d4b-c83992dae9a2-kube-api-access-54fc7\") pod \"ovnkube-control-plane-749d76644c-v86r6\" (UID: \"051f5a57-faac-43af-9d4b-c83992dae9a2\") " pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.408844 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.434985 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.450919 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.450975 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.450989 4703 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.451008 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.451022 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.452467 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.462868 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.474406 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\
\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.480394 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.488683 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube
-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: W0202 12:51:49.491803 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod051f5a57_faac_43af_9d4b_c83992dae9a2.slice/crio-4e72a3d54035454f173aedf7e6a37fc1e7be37342e3b38ccb3b041d49a9892cd WatchSource:0}: Error finding container 4e72a3d54035454f173aedf7e6a37fc1e7be37342e3b38ccb3b041d49a9892cd: Status 404 returned error can't find the container with id 4e72a3d54035454f173aedf7e6a37fc1e7be37342e3b38ccb3b041d49a9892cd Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.504789 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.519025 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.531131 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.544001 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.554381 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.554422 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.554432 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.554449 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.554461 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.558453 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.16
8.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02
-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bd
bc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.576819 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158
f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.587956 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.601624 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.611517 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.622230 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.632509 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.657172 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.657204 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.657213 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.657225 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.657234 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.759969 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.760014 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.760029 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.760045 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.760056 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.862542 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.862579 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.862587 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.862601 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.862609 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.870899 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 15:38:46.442531211 +0000 UTC
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.909266 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/network-metrics-daemon-n2htj"]
Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.909740 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:51:49 crc kubenswrapper[4703]: E0202 12:51:49.909809 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.919996 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.930588 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.932859 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:49 crc kubenswrapper[4703]: E0202 12:51:49.933022 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.940973 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.953009 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.965146 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.965181 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.965190 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.965203 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.965212 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:49Z","lastTransitionTime":"2026-02-02T12:51:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.966344 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.976992 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:49 crc kubenswrapper[4703]: I0202 12:51:49.987169 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.000538 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:49Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.012300 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"k
ube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.025455 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.043033 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"conta
inerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.054916 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy 
ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.067694 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.067978 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.068010 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.068024 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.068040 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.068052 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.079837 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.089473 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.089526 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qxsp8\" (UniqueName: \"kubernetes.io/projected/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-kube-api-access-qxsp8\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.090859 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.099621 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:50Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.170114 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.170152 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.170162 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.170177 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.170189 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.190992 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.191057 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qxsp8\" (UniqueName: \"kubernetes.io/projected/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-kube-api-access-qxsp8\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:50 crc kubenswrapper[4703]: E0202 12:51:50.191136 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:51:50 crc kubenswrapper[4703]: E0202 12:51:50.191203 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:50.691186514 +0000 UTC m=+37.706394048 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.207623 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qxsp8\" (UniqueName: \"kubernetes.io/projected/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-kube-api-access-qxsp8\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.222327 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" event={"ID":"051f5a57-faac-43af-9d4b-c83992dae9a2","Type":"ContainerStarted","Data":"4e72a3d54035454f173aedf7e6a37fc1e7be37342e3b38ccb3b041d49a9892cd"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.272221 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.272264 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.272288 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.272311 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.272321 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false 
reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.374932 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.374965 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.374976 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.374989 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.374998 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.477163 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.477202 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.477212 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.477229 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.477241 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.579220 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.579262 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.579293 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.579310 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.579321 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.681552 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.681595 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.681611 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.681631 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.681646 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.696294 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:50 crc kubenswrapper[4703]: E0202 12:51:50.696392 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:51:50 crc kubenswrapper[4703]: E0202 12:51:50.696448 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:51.696434386 +0000 UTC m=+38.711641920 (durationBeforeRetry 1s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.784291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.784327 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.784340 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.784353 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.784364 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.871559 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 11:56:30.397323144 +0000 UTC Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.886228 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.886259 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.886360 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.886388 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.886397 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.933383 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.933454 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:50 crc kubenswrapper[4703]: E0202 12:51:50.933510 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:50 crc kubenswrapper[4703]: E0202 12:51:50.933787 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.988323 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.988360 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.988369 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.988383 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:50 crc kubenswrapper[4703]: I0202 12:51:50.988394 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:50Z","lastTransitionTime":"2026-02-02T12:51:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.090783 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.090832 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.090847 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.090865 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.090881 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.194062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.194097 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.194106 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.194134 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.194144 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.229290 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/1.log" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.229962 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/0.log" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.232498 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a" exitCode=1 Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.232576 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.232618 4703 scope.go:117] "RemoveContainer" containerID="79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.233436 4703 scope.go:117] "RemoveContainer" containerID="d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a" Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.233601 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 10s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.237765 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" event={"ID":"051f5a57-faac-43af-9d4b-c83992dae9a2","Type":"ContainerStarted","Data":"c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.237815 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" event={"ID":"051f5a57-faac-43af-9d4b-c83992dae9a2","Type":"ContainerStarted","Data":"c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.244957 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod 
\"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.257786 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-ku
bernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.270774 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{
\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.284807 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.296783 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.296817 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.296826 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.296843 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.296855 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: 
NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.299312 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.313343 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.325902 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.344439 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.356258 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-rbac-proxy ovnkube-cluster-manager]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.368890 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.380465 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.392399 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.399447 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.399516 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.399541 4703 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.399577 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.399601 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.403086 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.416703 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.428063 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.439175 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.453197 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.465312 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.477486 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.490416 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.501993 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.502041 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.502052 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.502068 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.502080 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.503566 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.515088 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\
\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.529128 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"v
olumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-a
rt-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\
"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.546787 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/
ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run
/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall 
event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.556654 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.568378 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.578595 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.589222 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.598313 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.604048 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.604085 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.604098 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.604115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.604127 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.609652 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.617703 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.617740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.617752 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.617768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.617780 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady 
message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.619559 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.633337 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.633861 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status 
\"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae
669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-r
elease-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.637546 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.637607 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.637617 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.637635 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.637646 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.649573 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:51Z is after 
2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.652916 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.652986 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.653006 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.653032 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.653045 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.664364 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.668797 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.668860 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.668873 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.668897 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.668911 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.682609 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.686682 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.686726 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.686738 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.686760 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.686774 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.698673 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
2025-08-24T17:21:41Z" Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.698796 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.703847 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.703929 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.703954 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.703971 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.703993 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704031 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:52:07.704007867 +0000 UTC m=+54.719215411 (durationBeforeRetry 16s). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.704058 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704082 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704115 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:53.7041069 +0000 UTC m=+40.719314434 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704140 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704175 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704185 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704196 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704180 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704176 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:07.704166712 +0000 UTC m=+54.719374256 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704217 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704387 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:07.704353458 +0000 UTC m=+54.719561182 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704409 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:07.704398459 +0000 UTC m=+54.719606233 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704417 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704441 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.704563 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:07.704538534 +0000 UTC m=+54.719746068 (durationBeforeRetry 16s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered]
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.705717 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.705745 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.705753 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.705765 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.705776 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.808256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.808316 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.808325 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.808339 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.808351 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.872683 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-28 12:40:04.707505036 +0000 UTC
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.912145 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.912213 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.912226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.912264 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.912303 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:51Z","lastTransitionTime":"2026-02-02T12:51:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.933770 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:51:51 crc kubenswrapper[4703]: I0202 12:51:51.933773 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.934016 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:51:51 crc kubenswrapper[4703]: E0202 12:51:51.933899 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
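The NetworkPluginNotReady entries above all trace back to the kubelet finding no CNI configuration in /etc/kubernetes/cni/net.d/. A minimal Go sketch that checks that same directory; the path is taken verbatim from the log, while the accepted extensions (.conf, .conflist, .json) follow the libcni convention and are an assumption here:

    // cnicheck.go - reports whether the CNI config directory the kubelet
    // complains about actually contains a network configuration.
    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    func main() {
    	dir := "/etc/kubernetes/cni/net.d" // path from the log entries above
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		fmt.Printf("cannot read %s: %v\n", dir, err)
    		os.Exit(1)
    	}
    	found := 0
    	for _, e := range entries {
    		switch filepath.Ext(e.Name()) { // libcni-style extensions (assumption)
    		case ".conf", ".conflist", ".json":
    			fmt.Printf("CNI config: %s\n", filepath.Join(dir, e.Name()))
    			found++
    		}
    	}
    	if found == 0 {
    		fmt.Println("no CNI configuration files found - matches the NetworkPluginNotReady condition")
    	}
    }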
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.015846 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.015895 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.015907 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.015928 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.015943 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.119379 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.119900 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.119910 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.119928 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.119942 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.222996 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.223056 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.223077 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.223106 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.223123 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.244658 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/1.log" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.326365 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.326403 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.326412 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.326425 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.326434 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.429898 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.429947 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.429963 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.429983 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.430000 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.533217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.533266 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.533308 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.533325 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.533336 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.636079 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.636132 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.636145 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.636161 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.636172 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.738888 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.738995 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.739013 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.739044 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.739063 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.842530 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.842603 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.842626 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.842665 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.842689 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.872859 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 14:44:30.408353389 +0000 UTC
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.933850 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.933958 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:51:52 crc kubenswrapper[4703]: E0202 12:51:52.934104 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
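The Unmounter.TearDownAt failure earlier in this section reports that kubevirt.io.hostpath-provisioner is missing from the kubelet's list of registered CSI drivers. Drivers register by dropping a socket into the kubelet plugin registry; a minimal sketch that lists what is currently registered, assuming the default kubelet --root-dir of /var/lib/kubelet and the conventional <driver>-reg.sock naming (both assumptions, not taken from this log):

    // csicheck.go - lists registration sockets in the kubelet plugin registry.
    package main

    import (
    	"fmt"
    	"os"
    	"strings"
    )

    func main() {
    	dir := "/var/lib/kubelet/plugins_registry" // kubelet default (assumption)
    	entries, err := os.ReadDir(dir)
    	if err != nil {
    		fmt.Printf("cannot read %s: %v\n", dir, err)
    		os.Exit(1)
    	}
    	for _, e := range entries {
    		// Registration sockets are conventionally named <driver-name>-reg.sock.
    		fmt.Println(strings.TrimSuffix(e.Name(), "-reg.sock"))
    	}
    }

If kubevirt.io.hostpath-provisioner is absent from this listing, the TearDown error above is expected until the driver's node plugin re-registers.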
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:52 crc kubenswrapper[4703]: E0202 12:51:52.934235 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.945825 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.945915 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.945946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.945979 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:52 crc kubenswrapper[4703]: I0202 12:51:52.946003 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:52Z","lastTransitionTime":"2026-02-02T12:51:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.050104 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.050216 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.050230 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.050251 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.050267 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.158462 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.158516 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.158528 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.158549 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.158560 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.260345 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.260395 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.260404 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.260420 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.260461 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.364169 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.364246 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.364258 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.364318 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.364330 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.467717 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.467775 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.467786 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.467801 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.467810 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.570712 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.570775 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.570785 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.570805 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.570816 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.673654 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.673735 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.673761 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.673793 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.673812 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.723492 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:51:53 crc kubenswrapper[4703]: E0202 12:51:53.723675 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 12:51:53 crc kubenswrapper[4703]: E0202 12:51:53.723748 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:51:57.723727702 +0000 UTC m=+44.738935236 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.776937 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.776985 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.776998 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.777016 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.777027 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.873885 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 07:17:05.638296992 +0000 UTC
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.880348 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.880388 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.880402 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.880426 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.880441 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.933800 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.933764 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:51:53 crc kubenswrapper[4703]: E0202 12:51:53.934136 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:51:53 crc kubenswrapper[4703]: E0202 12:51:53.934366 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
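The status_manager entries that follow fail because the pod.network-node-identity.openshift.io webhook at https://127.0.0.1:9743 presents a certificate that expired on 2025-08-24T17:21:41Z. A minimal probe of that endpoint's validity window, assuming it runs on the node itself since the webhook listens on loopback; InsecureSkipVerify is deliberate, as the point is to read the expired certificate rather than trust it:

    // certprobe.go - prints the validity window of the webhook's serving cert.
    package main

    import (
    	"crypto/tls"
    	"fmt"
    	"os"
    )

    func main() {
    	// Endpoint taken from the "failed calling webhook" errors below.
    	conn, err := tls.Dial("tcp", "127.0.0.1:9743", &tls.Config{InsecureSkipVerify: true})
    	if err != nil {
    		fmt.Printf("dial failed: %v\n", err)
    		os.Exit(1)
    	}
    	defer conn.Close()
    	for _, cert := range conn.ConnectionState().PeerCertificates {
    		fmt.Printf("subject=%s notBefore=%s notAfter=%s\n",
    			cert.Subject,
    			cert.NotBefore.Format("2006-01-02T15:04:05Z07:00"),
    			cert.NotAfter.Format("2006-01-02T15:04:05Z07:00"))
    	}
    }

A notAfter of 2025-08-24T17:21:41Z against the node clock of 2026-02-02 would reproduce exactly the x509 "certificate has expired" failures recorded below.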
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.950393 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:53Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.967352 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:53Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.980664 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:53Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.983115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.983166 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.983182 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.983206 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.983221 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:53Z","lastTransitionTime":"2026-02-02T12:51:53Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:53 crc kubenswrapper[4703]: I0202 12:51:53.997551 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126
.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:53Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.016915 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-oper
ator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.037474 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=kube-apiserver-check-endpoints 
pod=kube-apiserver-crc_openshift-kube-apiserver(f4b27818a5e8e43d0dc095d08835c792)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.055460 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.075684 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.086946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.087019 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.087035 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.087062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.087077 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.095531 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post 
\"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.114248 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"read
Only\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed
\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 
2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.137497 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://9
1b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-rel
ease-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.157426 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.177347 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.189261 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.189876 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.190015 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.190303 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.190557 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.190722 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.204194 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.220609 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:54Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.294044 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.294542 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.294614 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.294698 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.294763 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.398974 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.399055 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.399075 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.399105 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.399125 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.503121 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.503173 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.503184 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.503202 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.503214 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.606634 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.606740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.606772 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.606812 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.606838 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.710183 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.710329 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.710350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.710385 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.710407 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.814568 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.814621 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.814632 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.814654 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.814664 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.874383 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-07 06:35:52.015289989 +0000 UTC Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.917419 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.917457 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.917467 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.917479 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.917487 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:54Z","lastTransitionTime":"2026-02-02T12:51:54Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.932972 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.932991 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:54 crc kubenswrapper[4703]: E0202 12:51:54.933123 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:54 crc kubenswrapper[4703]: E0202 12:51:54.933160 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:54 crc kubenswrapper[4703]: I0202 12:51:54.933654 4703 scope.go:117] "RemoveContainer" containerID="7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.020698 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.020739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.020748 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.020765 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.020777 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.122991 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.123045 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.123059 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.123083 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.123099 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.226607 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.226676 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.226697 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.226724 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.226746 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.271191 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.274610 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"f4b27818a5e8e43d0dc095d08835c792","Type":"ContainerStarted","Data":"cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.275081 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.300455 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.322213 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.335964 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.336002 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.336013 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.336034 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.336045 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.342027 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.357725 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.374461 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.386847 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.399766 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.412963 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\
\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.427749 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.438760 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.438804 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.438817 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.438837 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.438850 4703 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.443139 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.455892 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.470197 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.482120 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.496667 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.532617 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from 
sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev
@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.541232 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.541267 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.541292 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.541305 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.541315 4703 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.548089 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"i
p\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:51:55Z is after 2025-08-24T17:21:41Z" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.643674 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.643703 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.643711 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.643723 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.643732 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.745692 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.745729 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.745739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.745754 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.745765 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.848098 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.848126 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.848134 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.848148 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.848157 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.875007 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-16 06:58:09.647215165 +0000 UTC
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.933027 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.933035 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:51:55 crc kubenswrapper[4703]: E0202 12:51:55.933176 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:51:55 crc kubenswrapper[4703]: E0202 12:51:55.933259 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
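Every failed status patch above dies on the same TLS error: the serving certificate behind the pod.network-node-identity.openshift.io webhook expired at 2025-08-24T17:21:41Z, long before the node's current clock of 2026-02-02, so the patches are rejected at the TLS layer before the webhook ever sees them. A minimal Go sketch of the same NotAfter check the handshake performs, assuming only a PEM-encoded certificate on disk (the path is illustrative, not taken from this log):

    // Hypothetical helper, not kubelet code: report a certificate's
    // validity window the way the x509 error in this log does.
    package main

    import (
        "crypto/x509"
        "encoding/pem"
        "fmt"
        "log"
        "os"
        "time"
    )

    func main() {
        pemBytes, err := os.ReadFile("/path/to/serving-cert.pem") // assumed path
        if err != nil {
            log.Fatal(err)
        }
        block, _ := pem.Decode(pemBytes)
        if block == nil {
            log.Fatal("no PEM block found")
        }
        cert, err := x509.ParseCertificate(block.Bytes)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Printf("valid from %s to %s\n", cert.NotBefore, cert.NotAfter)
        if now := time.Now(); now.After(cert.NotAfter) {
            // Mirrors the log wording: "current time ... is after ...".
            fmt.Printf("expired: current time %s is after %s\n",
                now.Format(time.RFC3339), cert.NotAfter.Format(time.RFC3339))
        }
    }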
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.950835 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.950871 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.950883 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.950899 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:55 crc kubenswrapper[4703]: I0202 12:51:55.950913 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:55Z","lastTransitionTime":"2026-02-02T12:51:55Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.053453 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.053506 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.053519 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.053537 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.053549 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.155737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.155769 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.155781 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.155797 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.155807 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.258610 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.258643 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.258654 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.258669 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.258679 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.360645 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.360681 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.360690 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.360705 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.360714 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
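The condition={...} payload that setters.go prints is plain JSON, so it can be decoded to measure how long the node has been NotReady. A sketch with a struct mirroring only the fields visible in these lines (message abbreviated; this is not the full Kubernetes NodeCondition type):

    // Sketch: decode one "Node became not ready" condition and report
    // the age of the transition. Field names match the log's JSON keys.
    package main

    import (
        "encoding/json"
        "fmt"
        "log"
        "time"
    )

    type nodeCondition struct {
        Type               string    `json:"type"`
        Status             string    `json:"status"`
        LastHeartbeatTime  time.Time `json:"lastHeartbeatTime"`
        LastTransitionTime time.Time `json:"lastTransitionTime"`
        Reason             string    `json:"reason"`
        Message            string    `json:"message"`
    }

    func main() {
        raw := `{"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready"}`
        var c nodeCondition
        if err := json.Unmarshal([]byte(raw), &c); err != nil {
            log.Fatal(err)
        }
        fmt.Printf("%s=%s (%s), last transition %s ago\n", c.Type, c.Status, c.Reason,
            time.Since(c.LastTransitionTime).Round(time.Second))
    }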
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.463226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.463308 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.463321 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.463344 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.463356 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.565393 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.565440 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.565449 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.565464 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.565474 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.667999 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.668039 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.668049 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.668063 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.668072 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.771585 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.771625 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.771637 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.771654 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.771666 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.873995 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.874032 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.874043 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.874057 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.874066 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.875356 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 20:09:11.840451229 +0000 UTC
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.932936 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.932960 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:51:56 crc kubenswrapper[4703]: E0202 12:51:56.933086 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
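Both certificate_manager.go:356 lines report the same expiration, 2026-02-24 05:53:03 UTC, but different rotation deadlines (2025-12-16 earlier, 2025-11-15 here), which suggests the deadline is re-drawn with jitter each time it is evaluated. A sketch of that idea; the 0.7 + 0.2*rand fraction matches what client-go's certificate manager uses to my recollection, and the issue time below is invented, so treat both as assumptions:

    // Sketch: pick a rotation deadline at a random point 70-90% of the
    // way through the certificate's validity window, so repeated
    // evaluations (as in this log) yield different deadlines.
    package main

    import (
        "fmt"
        "math/rand"
        "time"
    )

    func rotationDeadline(notBefore, notAfter time.Time) time.Time {
        total := notAfter.Sub(notBefore)
        jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
        return notBefore.Add(jittered)
    }

    func main() {
        notAfter, _ := time.Parse(time.RFC3339, "2026-02-24T05:53:03Z")
        notBefore := notAfter.Add(-365 * 24 * time.Hour) // assumed issue time
        for i := 0; i < 2; i++ {
            fmt.Println("rotation deadline:", rotationDeadline(notBefore, notAfter))
        }
    }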
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:56 crc kubenswrapper[4703]: E0202 12:51:56.933238 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.976787 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.976831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.976841 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.976856 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:56 crc kubenswrapper[4703]: I0202 12:51:56.976865 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:56Z","lastTransitionTime":"2026-02-02T12:51:56Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.079431 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.079478 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.079489 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.079506 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.079519 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.182686 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.182753 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.182770 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.182800 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.182819 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.284831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.284891 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.284901 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.284917 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.284931 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.387066 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.387105 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.387115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.387129 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.387141 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.489247 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.489334 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.489347 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.489364 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.489376 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.591990 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.592032 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.592074 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.592093 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.592106 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.694236 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.694296 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.694306 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.694321 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.694330 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.762875 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:57 crc kubenswrapper[4703]: E0202 12:51:57.763015 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:51:57 crc kubenswrapper[4703]: E0202 12:51:57.763062 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:05.763049181 +0000 UTC m=+52.778256715 (durationBeforeRetry 8s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.796261 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.796324 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.796334 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.796351 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.796362 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.876207 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 22:57:44.127161325 +0000 UTC Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.897862 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.897899 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.897910 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.897931 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.897944 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:57Z","lastTransitionTime":"2026-02-02T12:51:57Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.933162 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:57 crc kubenswrapper[4703]: E0202 12:51:57.933299 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:57 crc kubenswrapper[4703]: I0202 12:51:57.933338 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:57 crc kubenswrapper[4703]: E0202 12:51:57.933417 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.000191 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.000222 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.000230 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.000243 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.000251 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.103706 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.103757 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.103768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.103787 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.103801 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.206685 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.206730 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.206741 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.206758 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.206770 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.310876 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.310933 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.310950 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.310976 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.310995 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.414695 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.414740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.414754 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.414771 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.414782 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.516821 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.516927 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.516945 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.516973 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.516989 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.620857 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.620938 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.620954 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.620980 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.620998 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.724678 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.724720 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.724731 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.724751 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.724767 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.828631 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.828737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.828749 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.828768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.828778 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.877158 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 05:26:04.71308376 +0000 UTC Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.932089 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.932156 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.932174 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.932200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.932217 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:58Z","lastTransitionTime":"2026-02-02T12:51:58Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.933023 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:51:58 crc kubenswrapper[4703]: I0202 12:51:58.933086 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:51:58 crc kubenswrapper[4703]: E0202 12:51:58.933244 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:51:58 crc kubenswrapper[4703]: E0202 12:51:58.933355 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.036043 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.036086 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.036095 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.036112 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.036124 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.140105 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.140698 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.140880 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.141079 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.141362 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.245992 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.246071 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.246091 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.246117 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.246137 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.350035 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.350099 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.350116 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.350139 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.350158 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.453619 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.453683 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.453703 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.453727 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.453743 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.557560 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.557813 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.557971 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.558070 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.558166 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.660644 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.660681 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.660691 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.660705 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.660730 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.762808 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.762843 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.762851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.762864 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.762873 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.865540 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.865591 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.865608 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.865631 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.865647 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.877972 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-18 13:03:39.871691783 +0000 UTC Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.933244 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.933471 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:51:59 crc kubenswrapper[4703]: E0202 12:51:59.933545 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:51:59 crc kubenswrapper[4703]: E0202 12:51:59.933793 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.968140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.968194 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.968206 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.968226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:51:59 crc kubenswrapper[4703]: I0202 12:51:59.968242 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:51:59Z","lastTransitionTime":"2026-02-02T12:51:59Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.071192 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.071239 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.071248 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.071265 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.071297 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.174166 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.174209 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.174225 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.174242 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.174254 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.276973 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.277026 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.277082 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.277106 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.277121 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.380751 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.380797 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.380807 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.380820 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.380831 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.484150 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.484217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.484233 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.484256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.484301 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.588783 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.588849 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.588861 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.588882 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.588897 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.691071 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.691125 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.691137 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.691157 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.691170 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.798410 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.798514 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.798537 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.798572 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.798595 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.878083 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-11 05:04:56.959186914 +0000 UTC Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.902209 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.902266 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.902309 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.902332 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.902352 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:00Z","lastTransitionTime":"2026-02-02T12:52:00Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.932863 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:00 crc kubenswrapper[4703]: I0202 12:52:00.932904 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:00 crc kubenswrapper[4703]: E0202 12:52:00.933015 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:00 crc kubenswrapper[4703]: E0202 12:52:00.933209 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.005885 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.005949 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.005965 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.005985 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.005999 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.109592 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.109662 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.109686 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.109717 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.109740 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.212082 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.212131 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.212141 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.212156 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.212166 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.316058 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.316123 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.316142 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.316168 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.316186 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.418995 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.419139 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.419160 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.419190 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.419237 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.522479 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.522575 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.522596 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.522654 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.522676 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.627150 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.627261 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.627329 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.627373 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.627393 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.731446 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.731495 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.731505 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.731524 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.731536 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.797610 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.797657 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.797676 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.797701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.797717 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.818495 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:01Z is after 2025-08-24T17:21:41Z"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.823538 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.823581 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.823592 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.823611 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.823626 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.838784 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:01Z is after 2025-08-24T17:21:41Z"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.842739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.842786 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.842797 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.842816 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.842833 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.854855 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:01Z is after 2025-08-24T17:21:41Z"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.859562 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.859641 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.859662 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.859687 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.859706 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.874901 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:01Z is after 2025-08-24T17:21:41Z"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.878185 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 14:12:20.069617692 +0000 UTC
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.878840 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.878914 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.878941 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.878975 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.879000 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.897372 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:01Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:01Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.897515 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.899977 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.900151 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.900188 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.900302 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.900338 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:01Z","lastTransitionTime":"2026-02-02T12:52:01Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.932779 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:01 crc kubenswrapper[4703]: I0202 12:52:01.932851 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.932994 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:01 crc kubenswrapper[4703]: E0202 12:52:01.933160 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.003519 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.003598 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.003626 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.003658 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.003683 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.107344 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.107413 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.107431 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.107458 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.107477 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.210687 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.210745 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.210772 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.210806 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.210830 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.313854 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.313929 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.313941 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.313962 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.313973 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.417404 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.417491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.417512 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.417543 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.417561 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.521476 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.521536 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.521548 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.521571 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.521585 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.624398 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.624435 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.624448 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.624461 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.624471 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.726743 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.726787 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.726797 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.726812 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.726824 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.829339 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.829401 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.829413 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.829431 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.829443 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.878920 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 17:40:52.606300501 +0000 UTC Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.931934 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.931978 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.931989 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.932004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.932013 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:02Z","lastTransitionTime":"2026-02-02T12:52:02Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.933116 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:02 crc kubenswrapper[4703]: E0202 12:52:02.933210 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:02 crc kubenswrapper[4703]: I0202 12:52:02.933115 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:02 crc kubenswrapper[4703]: E0202 12:52:02.933303 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.034682 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.034737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.034746 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.034763 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.034775 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.136954 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.137002 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.137058 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.137074 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.137083 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.239653 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.239727 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.239740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.239753 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.239782 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.342551 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.342604 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.342623 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.342645 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.342693 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.445454 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.445517 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.445526 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.445543 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.445553 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.547444 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.547494 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.547504 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.547518 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.547526 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.649131 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.649215 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.649228 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.649245 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.649255 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.684638 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.691835 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler/openshift-kube-scheduler-crc"] Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.698111 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.708205 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.719885 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.728885 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.740151 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.749932 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.751531 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.751574 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.751589 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.751605 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.751614 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.760169 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.771651 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook 
\"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.784968 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"st
arted\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.797532 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.810695 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.822539 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.835569 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.849547 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\"
:\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\
"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z
\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.853918 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.853959 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.853968 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.853985 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.853997 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.868754 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.879379 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 22:17:44.130163035 +0000 UTC
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.883002 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 
12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.933749 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.933777 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:03 crc kubenswrapper[4703]: E0202 12:52:03.933877 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:03 crc kubenswrapper[4703]: E0202 12:52:03.934122 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.947922 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a
2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.956252 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.956300 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.956313 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.956326 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.956335 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:03Z","lastTransitionTime":"2026-02-02T12:52:03Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.958496 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.970826 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.983662 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:03 crc kubenswrapper[4703]: I0202 12:52:03.995544 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:03Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.009122 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.019343 4703 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.029744 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.043503 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.055572 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.058295 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.058325 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.058336 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.058354 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.058369 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.068868 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.081727 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.094125 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.111142 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cer
t\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":tr
ue,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://79fb9a143e80524868eb0711a21fec1136ea90edaeede7103948c4e81afc9c55\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:47Z\\\",\\\"message\\\":\\\" 5965 handler.go:208] Removed *v1.Pod event handler 3\\\\nI0202 12:51:47.763302 5965 handler.go:190] Sending *v1.NetworkPolicy event handler 4 for removal\\\\nI0202 12:51:47.763332 5965 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:47.763341 5965 handler.go:190] Sending *v1.Node event handler 7 for removal\\\\nI0202 12:51:47.763362 5965 handler.go:190] Sending *v1.EgressIP event handler 8 for removal\\\\nI0202 12:51:47.763405 5965 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:47.763436 5965 handler.go:190] Sending *v1.Namespace event handler 1 for removal\\\\nI0202 12:51:47.763437 5965 handler.go:208] Removed *v1.Node event handler 7\\\\nI0202 12:51:47.763444 5965 handler.go:190] Sending *v1.Namespace event handler 5 for removal\\\\nI0202 12:51:47.763461 5965 handler.go:208] Removed *v1.Namespace event handler 5\\\\nI0202 12:51:47.763476 5965 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:47.763479 5965 handler.go:208] Removed *v1.EgressIP event handler 8\\\\nI0202 12:51:47.763503 5965 handler.go:190] Sending *v1.EgressFirewall event handler 9 for removal\\\\nI0202 12:51:47.763413 5965 handler.go:208] Removed *v1.NetworkPolicy event handler 4\\\\nI0202 12:51:47.763536 5965 handler.go:208] Removed *v1.EgressFirewall event handler 9\\\\nI0202 12:51:47.763530 5965 factory.go:656] Stopping 
\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d20
99482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.122917 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnl
y\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.135725 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.149046 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:04Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.160079 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.160113 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.160123 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.160137 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.160147 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.262064 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.262098 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.262109 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.262124 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.262136 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.364740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.364796 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.364811 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.364832 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.364851 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.466791 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.466827 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.466856 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.466869 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.466878 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.569448 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.569514 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.569529 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.569546 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.569558 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.672495 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.672557 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.672580 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.672610 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.672631 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.774947 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.774991 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.775004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.775021 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.775033 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.877496 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.877535 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.877545 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.877558 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.877567 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.880248 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 19:59:27.850622687 +0000 UTC Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.933750 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.933811 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:04 crc kubenswrapper[4703]: E0202 12:52:04.933873 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:04 crc kubenswrapper[4703]: E0202 12:52:04.933954 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.980424 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.980471 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.980490 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.980507 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:04 crc kubenswrapper[4703]: I0202 12:52:04.980520 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:04Z","lastTransitionTime":"2026-02-02T12:52:04Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.082650 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.082696 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.082706 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.082718 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.082728 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.185734 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.185786 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.185797 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.185810 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.185819 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.288333 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.288392 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.288403 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.288420 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.288431 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.392125 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.392191 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.392200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.392220 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.392231 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.495192 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.495259 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.495291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.495316 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.495330 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.598888 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.598962 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.598981 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.599011 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.599031 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.702885 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.702938 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.702950 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.702968 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.702979 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.805433 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.805472 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.805481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.805495 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.805504 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.850248 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:05 crc kubenswrapper[4703]: E0202 12:52:05.850435 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:52:05 crc kubenswrapper[4703]: E0202 12:52:05.850497 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:21.850483664 +0000 UTC m=+68.865691198 (durationBeforeRetry 16s). 
Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.880594 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-22 03:42:50.757098981 +0000 UTC Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.907556 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.907599 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.907609 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.907622 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.907632 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:05Z","lastTransitionTime":"2026-02-02T12:52:05Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.933324 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.933391 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:05 crc kubenswrapper[4703]: E0202 12:52:05.933544 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:05 crc kubenswrapper[4703]: E0202 12:52:05.934033 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.934600 4703 scope.go:117] "RemoveContainer" containerID="d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.949300 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:05Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.960550 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:05Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.971267 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:05Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.982325 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:05Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:05 crc kubenswrapper[4703]: I0202 12:52:05.994772 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
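
Each of these patch bodies is a strategic merge patch against the pod's status subresource. The "$setElementOrder/conditions" key is a merge directive rather than a field: it gives the server the desired final ordering of the conditions list, whose elements are merged by their "type" key, while the "conditions" array itself carries only the entries whose values changed. A small stdlib-only sketch that pulls both parts out of such a patch; the JSON literal is abbreviated from the network-metrics-daemon entry above:

    // Decode the skeleton of a kubelet status patch: the element-order
    // directive plus the changed conditions. encoding/json tag names may
    // contain '$' and '/', so the directive key maps cleanly.
    package main

    import (
        "encoding/json"
        "fmt"
    )

    type condition struct {
        Type   string `json:"type"`
        Status string `json:"status,omitempty"`
        Reason string `json:"reason,omitempty"`
    }

    type statusPatch struct {
        Status struct {
            Order      []condition `json:"$setElementOrder/conditions"`
            Conditions []condition `json:"conditions"`
        } `json:"status"`
    }

    func main() {
        raw := `{"status":{
            "$setElementOrder/conditions":[{"type":"PodReadyToStartContainers"},
              {"type":"Initialized"},{"type":"Ready"},{"type":"ContainersReady"},
              {"type":"PodScheduled"}],
            "conditions":[{"type":"Ready","status":"False","reason":"ContainersNotReady"}]}}`

        var p statusPatch
        if err := json.Unmarshal([]byte(raw), &p); err != nil {
            panic(err)
        }
        fmt.Println("desired condition order:")
        for _, c := range p.Status.Order {
            fmt.Println("  ", c.Type)
        }
        fmt.Println("changed conditions:")
        for _, c := range p.Status.Conditions {
            fmt.Printf("   %s=%s (%s)\n", c.Type, c.Status, c.Reason)
        }
    }

None of these patches is ever applied here: admission fails before the merge, so the API server's copy of each pod keeps its stale status.
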
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:05Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.007431 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.010180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.010214 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.010223 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.010236 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.010248 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
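
Buried at the end of the entry above is the section's pivotal event (its closing question, "Has your network provider started?", continues just below): setters.go flips the node's Ready condition to False with reason KubeletNotReady because the container runtime reports NetworkReady=false, having found no CNI configuration file in /etc/kubernetes/cni/net.d/. At bottom that runtime check is a scan of the CNI conf directory; the following is a rough stdlib approximation of the idea, not the CRI-O/ocicni implementation (the directory path is the one in the message; the matched extensions are the conventional CNI ones and are an assumption here):

    // Sketch: "no CNI configuration file" means the conf directory holds
    // no *.conf, *.conflist, or *.json files the runtime can load.
    package main

    import (
        "fmt"
        "path/filepath"
    )

    func main() {
        confDir := "/etc/kubernetes/cni/net.d" // path from the log message
        var files []string
        for _, pat := range []string{"*.conf", "*.conflist", "*.json"} {
            matches, err := filepath.Glob(filepath.Join(confDir, pat))
            if err != nil {
                panic(err) // only possible on a malformed pattern
            }
            files = append(files, matches...)
        }
        if len(files) == 0 {
            fmt.Println("NetworkReady=false: no CNI configuration file in", confDir)
            return
        }
        fmt.Println("CNI configs found:", files)
    }

The likely culprit is visible further down in this section: the ovnkube-node pod that writes the OVN-Kubernetes CNI config is itself crash-looping, so the directory stays empty and the node stays NotReady.
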
Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.019585 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.034579 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: 
[kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling 
back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.047127 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.059131 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.071916 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.097025 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158
f72836d0b4f6e5a38e47b68a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":1,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 10s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.111695 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.112511 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.112532 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.112569 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.112587 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.112599 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.129780 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.145079 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.158117 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.174393 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.215971 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.216028 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.216042 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.216064 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.216080 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.316379 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/1.log" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.318514 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.319350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.319500 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.319573 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.319589 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.319816 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.320794 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.338536 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.358490 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.381491 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.400907 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.422402 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.422437 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.422448 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.422463 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.422474 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.422961 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartC
ount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.438601 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.459507 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.474997 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.497834 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run 
ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"con
tainerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.510676 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 
12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.525324 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.525357 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.525366 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.525379 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.525388 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.528685 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 
12:52:06.545211 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.563235 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.577651 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.589444 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.599398 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.609652 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:06Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.634701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.634733 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.634742 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.634757 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.634770 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.737387 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.737415 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.737422 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.737435 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.737444 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.840032 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.840079 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.840089 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.840111 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.840121 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.881445 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-13 15:24:31.548584979 +0000 UTC Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.932871 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.932985 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:06 crc kubenswrapper[4703]: E0202 12:52:06.932993 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:06 crc kubenswrapper[4703]: E0202 12:52:06.933836 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.941985 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.942024 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.942035 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.942052 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:06 crc kubenswrapper[4703]: I0202 12:52:06.942063 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:06Z","lastTransitionTime":"2026-02-02T12:52:06Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.044115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.044154 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.044164 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.044180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.044189 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.148003 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.148047 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.148059 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.148077 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.148091 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.250660 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.250708 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.250720 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.250737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.250748 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.324663 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/2.log" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.325444 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/1.log" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.331672 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4" exitCode=1 Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.331727 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.331767 4703 scope.go:117] "RemoveContainer" containerID="d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.332601 4703 scope.go:117] "RemoveContainer" containerID="538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4" Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.332782 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.347640 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.352984 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.353024 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.353033 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.353047 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.353056 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.360864 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.372918 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.384322 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.395741 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.405804 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.415678 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.429346 4703 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\
\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.440706 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-
art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.452932 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-apiserver-check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.455563 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.455610 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.455624 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.455642 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.455654 4703 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.466637 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.480162 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.493214 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.507091 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.524103 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} 
was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"init
ContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.538696 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 
12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.554305 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.558792 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 
12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.559063 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.559075 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.559120 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.559131 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.662116 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.662607 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.662707 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.663164 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.663237 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.766063 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.766098 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.766108 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.766122 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.766149 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.768892 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.768991 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:52:39.768969878 +0000 UTC m=+86.784177412 (durationBeforeRetry 32s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.769121 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.769183 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.769235 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769302 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769302 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769321 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.769327 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: 
\"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769335 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769354 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769469 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769480 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769488 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769369 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:39.76935087 +0000 UTC m=+86.784558604 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769531 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:39.769521296 +0000 UTC m=+86.784728820 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769545 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. 
No retries permitted until 2026-02-02 12:52:39.769539556 +0000 UTC m=+86.784747090 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.769568 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:39.769561007 +0000 UTC m=+86.784768541 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.868919 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.868986 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.868998 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.869027 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.869043 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.882255 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-07 12:50:31.247171761 +0000 UTC Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.887438 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.901047 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.913637 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.925809 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.933654 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.933745 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.933793 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:07 crc kubenswrapper[4703]: E0202 12:52:07.933871 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.940974 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\
"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.956364 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.969488 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.971586 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.971628 4703 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.971637 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.971655 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.971670 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:07Z","lastTransitionTime":"2026-02-02T12:52:07Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.984087 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:07 crc kubenswrapper[4703]: I0202 12:52:07.996710 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:07Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.009014 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.022684 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.040160 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d85f1441cd3f3294d055142273e483a2e300a158f72836d0b4f6e5a38e47b68a\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:51:50Z\\\",\\\"message\\\":\\\".521265 6148 reflector.go:311] Stopping reflector *v1alpha1.AdminNetworkPolicy (0s) from sigs.k8s.io/network-policy-api/pkg/client/informers/externalversions/factory.go:141\\\\nI0202 12:51:49.521373 6148 handler.go:208] Removed *v1.Namespace event handler 1\\\\nI0202 12:51:49.521378 6148 reflector.go:311] Stopping reflector *v1.EgressIP (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressip/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.521565 6148 reflector.go:311] Stopping reflector *v1.EgressFirewall (0s) from github.com/openshift/ovn-kubernetes/go-controller/pkg/crd/egressfirewall/v1/apis/informers/externalversions/factory.go:140\\\\nI0202 12:51:49.522876 6148 handler.go:190] Sending *v1.Node event handler 2 for removal\\\\nI0202 12:51:49.522926 6148 factory.go:656] Stopping watch factory\\\\nI0202 12:51:49.522952 6148 handler.go:208] Removed *v1.Node event handler 2\\\\nI0202 12:51:49.537777 6148 shared_informer.go:320] Caches are synced for node-tracker-controller\\\\nI0202 12:51:49.537801 6148 services_controller.go:204] Setting up event handlers for services for network=default\\\\nI0202 12:51:49.537863 6148 ovnkube.go:599] Stopped ovnkube\\\\nI0202 12:51:49.537887 6148 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nF0202 12:51:49.537962 6148 ovnkube.go:137] failed to run ov\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:48Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} 
was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"init
ContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.051545 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 
12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.066039 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.073490 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 
12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.073533 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.073550 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.073573 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.073589 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.080018 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.103451 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.123189 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.144651 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.176251 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.176320 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.176331 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.176358 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.176370 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.278911 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.278953 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.278964 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.278980 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.278992 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.337171 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/2.log" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.340756 4703 scope.go:117] "RemoveContainer" containerID="538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4" Feb 02 12:52:08 crc kubenswrapper[4703]: E0202 12:52:08.340926 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.351150 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.363190 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.372421 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.381375 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.381418 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.381426 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.381440 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.381448 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.386856 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\
\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' 
detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.399560 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.411676 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.420875 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.433827 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod 
\"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.444352 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"r
estartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.456608 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.469917 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33
488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.483203 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.483239 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.483250 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.483266 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.483293 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.490950 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\
":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.i
o/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.501462 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.512743 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.523749 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.535451 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.544671 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:08Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.585703 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.585734 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.585743 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.585758 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.585767 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.687554 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.687586 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.687596 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.687612 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.687623 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.790946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.791311 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.791323 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.791341 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.791353 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.883111 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 04:08:20.978276448 +0000 UTC
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.895327 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.895399 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.895418 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.895445 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.895466 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.933683 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:08 crc kubenswrapper[4703]: E0202 12:52:08.933808 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.933943 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:08 crc kubenswrapper[4703]: E0202 12:52:08.934216 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.997802 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.997873 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.997891 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.997914 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:08 crc kubenswrapper[4703]: I0202 12:52:08.997931 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:08Z","lastTransitionTime":"2026-02-02T12:52:08Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.100902 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.100963 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.100979 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.100999 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.101012 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.203702 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.203749 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.203761 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.203794 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.203805 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.306767 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.306822 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.306835 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.306852 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.306864 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.408814 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.409025 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.409115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.409180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.409300 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.511434 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.511476 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.511491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.511511 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.511526 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.613787 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.614129 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.614248 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.614369 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.614455 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.717387 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.717692 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.717800 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.717904 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.718006 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.820755 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.821035 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.821108 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.821185 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.821262 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.883457 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-11 20:06:38.466144274 +0000 UTC
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.923300 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.923549 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.923637 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.923722 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.923819 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:09Z","lastTransitionTime":"2026-02-02T12:52:09Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.933661 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:09 crc kubenswrapper[4703]: E0202 12:52:09.933774 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:09 crc kubenswrapper[4703]: I0202 12:52:09.933660 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:09 crc kubenswrapper[4703]: E0202 12:52:09.933876 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.026665 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.026744 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.026768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.026806 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.026825 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.129397 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.129438 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.129450 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.129467 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.129479 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.232485 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.232526 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.232537 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.232570 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.232580 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.334859 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.334901 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.334922 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.334939 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.334959 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.438469 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.438527 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.438537 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.438558 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.438569 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.540825 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.540870 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.540881 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.540899 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.540909 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.643014 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.643050 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.643061 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.643077 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.643089 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.745541 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.745576 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.745587 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.745600 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.745610 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.848491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.848883 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.849015 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.849124 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.849236 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.883635 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-23 15:20:59.082382932 +0000 UTC
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.933574 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.933596 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:10 crc kubenswrapper[4703]: E0202 12:52:10.933700 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:10 crc kubenswrapper[4703]: E0202 12:52:10.933832 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.952304 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.952543 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.952612 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.952814 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:10 crc kubenswrapper[4703]: I0202 12:52:10.952900 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:10Z","lastTransitionTime":"2026-02-02T12:52:10Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.055334 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.055637 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.055757 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.055888 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.056010 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.158963 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.159243 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.159337 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.159405 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.159469 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.261350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.261387 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.261395 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.261408 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.261417 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.363606 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.363859 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.363939 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.364022 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.364096 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.467293 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.467596 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.467676 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.467767 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.467840 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.569913 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.569949 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.569971 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.569988 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.570000 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.672844 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.672898 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.672914 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.672933 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.672944 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.775246 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.775316 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.775326 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.775346 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.775359 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.877017 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.877048 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.877057 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.877070 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.877079 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.884207 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 22:37:41.21278885 +0000 UTC
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.932945 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.933057 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:11 crc kubenswrapper[4703]: E0202 12:52:11.933447 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:11 crc kubenswrapper[4703]: E0202 12:52:11.933545 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.979724 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.979766 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.979777 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.979792 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:11 crc kubenswrapper[4703]: I0202 12:52:11.979802 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:11Z","lastTransitionTime":"2026-02-02T12:52:11Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.082408 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.082456 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.082467 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.082484 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.082497 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.184728 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.184756 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.184764 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.184778 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.184786 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.203055 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.203091 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.203100 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.203114 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.203129 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.215300 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:12Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.218381 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.218418 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.218429 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.218444 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.218454 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.229630 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:12Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.233137 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.233180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.233191 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.233206 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.233215 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.244825 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:12Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.248741 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.248777 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.248790 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.248807 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.248818 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.261130 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:12Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.268053 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.268090 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.268100 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.268115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.268126 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.280738 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:12Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:12Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.280892 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.287401 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.287496 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.287510 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.287531 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.287542 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.389908 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.389963 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.389980 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.390004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.390022 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.491964 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.492179 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.492305 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.492383 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.492456 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.594527 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.594560 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.594569 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.594586 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.594603 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.697443 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.697478 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.697488 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.697502 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.697510 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.799929 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.799972 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.799991 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.800007 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.800019 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.884925 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 16:47:07.886346282 +0000 UTC Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.902699 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.902736 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.902747 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.902762 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.902773 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:12Z","lastTransitionTime":"2026-02-02T12:52:12Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.933436 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:12 crc kubenswrapper[4703]: I0202 12:52:12.933466 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.933595 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:12 crc kubenswrapper[4703]: E0202 12:52:12.933691 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.005000 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.005039 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.005051 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.005068 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.005079 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.107328 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.107385 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.107404 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.107427 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.107441 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.210534 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.210601 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.210614 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.210634 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.210646 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.313099 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.313144 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.313154 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.313169 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.313187 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.415623 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.415658 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.415666 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.415678 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.415687 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.518075 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.518107 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.518116 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.518130 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.518142 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.621479 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.621528 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.621545 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.621562 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.621574 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.725255 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.725334 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.725351 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.725377 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.725393 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.827735 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.827789 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.827800 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.827819 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.827835 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.885814 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-30 17:13:21.040287534 +0000 UTC Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.929690 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.929723 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.929733 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.929747 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.929757 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:13Z","lastTransitionTime":"2026-02-02T12:52:13Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.933162 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:13 crc kubenswrapper[4703]: E0202 12:52:13.933300 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.933322 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:13 crc kubenswrapper[4703]: E0202 12:52:13.933450 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.947247 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:13Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.958991 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:13Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.971020 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:13Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.980584 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:13Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:13 crc kubenswrapper[4703]: I0202 12:52:13.989590 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:13Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.001695 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:13Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.010923 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.023915 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f3
6dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.031683 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.031718 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.031728 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.031741 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.031751 4703 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.037573 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.050894 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.064081 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.080977 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.095232 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.109995 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.127746 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.134707 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.134739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:14 crc 
kubenswrapper[4703]: I0202 12:52:14.134750 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.134768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.134777 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.149194 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.169257 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:14Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.237001 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.237045 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.237065 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.237087 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.237099 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.339120 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.339155 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.339165 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.339182 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.339193 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.441089 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.441391 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.441500 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.441618 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.441766 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.544804 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.544846 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.544855 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.544872 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.544883 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.647585 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.647617 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.647626 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.647638 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.647648 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.750631 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.750675 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.750683 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.750697 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.750707 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.852769 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.852855 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.852867 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.852883 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.852896 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.886236 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 17:41:58.011288865 +0000 UTC
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.932760 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:14 crc kubenswrapper[4703]: E0202 12:52:14.932927 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.932795 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:14 crc kubenswrapper[4703]: E0202 12:52:14.933000 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.955399 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.955448 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.955461 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.955481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:14 crc kubenswrapper[4703]: I0202 12:52:14.955493 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:14Z","lastTransitionTime":"2026-02-02T12:52:14Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
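Every failed status patch above shares one root cause: the serving certificate for the pod.network-node-identity.openshift.io webhook at 127.0.0.1:9743 expired on 2025-08-24T17:21:41Z, while the node clock reads 2026-02-02T12:52:14Z. A minimal sketch for confirming the certificate's validity window from the node; the endpoint is taken from the log, and the third-party cryptography package is an assumption:

    # Sketch: fetch the webhook serving certificate and print its validity window.
    import ssl
    from datetime import datetime, timezone
    from cryptography import x509  # assumed available; not part of the stdlib

    # get_server_certificate() skips chain verification by default,
    # so it can still retrieve an already-expired certificate.
    pem = ssl.get_server_certificate(("127.0.0.1", 9743))
    cert = x509.load_pem_x509_certificate(pem.encode())

    now = datetime.now(timezone.utc)
    print("notBefore:", cert.not_valid_before_utc)  # cryptography >= 42
    print("notAfter: ", cert.not_valid_after_utc)
    print("expired:  ", now > cert.not_valid_after_utc)

If notAfter matches the 2025-08-24T17:21:41Z from the webhook error, the status patches will keep failing until the certificate is rotated or the node clock is corrected.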
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.057843 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.057913 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.057925 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.057943 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.057954 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.159947 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.159983 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.159992 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.160004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.160013 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.261890 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.261927 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.261936 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.261950 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.261964 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.364255 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.364322 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.364334 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.364350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.364363 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.467629 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.467682 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.467695 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.467715 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.467727 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.570366 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.570425 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.570437 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.570465 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.570478 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.673644 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.673702 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.673716 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.673737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.673754 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.776136 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.776175 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.776183 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.776197 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.776207 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.878372 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.878427 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.878448 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.878468 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.878480 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.886563 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-12 07:34:29.324888242 +0000 UTC
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.933295 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:15 crc kubenswrapper[4703]: E0202 12:52:15.933489 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.933540 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:15 crc kubenswrapper[4703]: E0202 12:52:15.933686 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.981204 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.981238 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.981250 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.981265 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:15 crc kubenswrapper[4703]: I0202 12:52:15.981300 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:15Z","lastTransitionTime":"2026-02-02T12:52:15Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
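The NodeNotReady churn above has a single trigger: the kubelet finds no CNI configuration in /etc/kubernetes/cni/net.d/, and on this node that file only appears once the ovnkube-node pod (shown in CrashLoopBackOff in the status dump earlier) comes up and writes it. A small sketch, assuming local root access on the node, that polls the directory named in the kubelet message:

    # Sketch: watch the CNI config directory from the kubelet error message.
    # The path is taken verbatim from the log; the polling cadence is arbitrary.
    import time
    from pathlib import Path

    CNI_DIR = Path("/etc/kubernetes/cni/net.d")

    for _ in range(30):                          # ~1 minute at 2 s intervals
        confs = sorted(CNI_DIR.glob("*.conf*"))  # matches .conf and .conflist
        if confs:
            print("CNI config present:", [p.name for p in confs])
            break
        print("still empty:", CNI_DIR)
        time.sleep(2)
    else:
        print("no CNI config appeared; check the ovnkube-node pod")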
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.084090 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.084140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.084153 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.084173 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.084186 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.187027 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.187396 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.187498 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.187606 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.187702 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.289909 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.290226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.290347 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.290458 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.290551 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.399680 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.399716 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.399727 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.399744 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.399759 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.502100 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.502141 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.502153 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.502167 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.502176 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.604718 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.605051 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.605067 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.605091 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.605106 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.711110 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.711152 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.711162 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.711176 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.711184 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.814027 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.814087 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.814098 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.814114 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.814125 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.886932 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-17 22:54:17.762354827 +0000 UTC
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.916455 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.916491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.916503 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.916521 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.916535 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:16Z","lastTransitionTime":"2026-02-02T12:52:16Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.932985 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:16 crc kubenswrapper[4703]: I0202 12:52:16.932984 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:16 crc kubenswrapper[4703]: E0202 12:52:16.933119 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:16 crc kubenswrapper[4703]: E0202 12:52:16.933195 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
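The node-status blocks above repeat roughly every 100 ms, which buries the few informative entries. A throwaway sketch that condenses a saved copy of this log into per-second event counts; the kubelet.log filename is hypothetical, and the regex mirrors the entry format shown above:

    # Sketch: count "Recording event message for node" entries per second.
    import re
    from collections import Counter

    pat = re.compile(
        r'I0202 (\d{2}:\d{2}:\d{2})\.\d+ \d+ kubelet_node_status\.go:\d+\] '
        r'"Recording event message for node" node="crc" event="([^"]+)"'
    )

    counts = Counter()
    with open("kubelet.log") as fh:  # hypothetical local copy of this log
        for line in fh:
            for second, event in pat.findall(line):
                counts[(second, event)] += 1

    for (second, event), n in sorted(counts.items()):
        print(f"{second}  {event:<24} x{n}")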
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.018851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.018973 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.018994 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.019013 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.019025 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:17Z","lastTransitionTime":"2026-02-02T12:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.121052 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.121082 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.121090 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.121103 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.121112 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:17Z","lastTransitionTime":"2026-02-02T12:52:17Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
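Every entry above traces back to one condition: the kubelet reports NetworkReady=false because no CNI configuration file exists in /etc/kubernetes/cni/net.d/. Below is a minimal Go sketch of that kind of readiness probe; hasCNIConfig is a hypothetical helper, not the kubelet's actual implementation, which delegates the check to the container runtime (CRI-O here).

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // hasCNIConfig reports whether confDir contains at least one CNI
    // network configuration file (*.conf, *.conflist, or *.json). While
    // no such file exists, the runtime reports the network as not ready
    // and the kubelet keeps the node's Ready condition False.
    func hasCNIConfig(confDir string) (bool, error) {
    	entries, err := os.ReadDir(confDir)
    	if err != nil {
    		return false, err
    	}
    	for _, e := range entries {
    		if e.IsDir() {
    			continue
    		}
    		switch filepath.Ext(e.Name()) {
    		case ".conf", ".conflist", ".json":
    			return true, nil
    		}
    	}
    	return false, nil
    }

    func main() {
    	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
    	if err != nil {
    		fmt.Println("cannot read CNI conf dir:", err)
    		return
    	}
    	if !ok {
    		fmt.Println("no CNI configuration file found; network plugin not ready")
    	}
    }

Once the cluster's network plugin (Multus/OVN-Kubernetes on OpenShift) writes its config into that directory, the runtime flips NetworkReady to true and the KubeletNotReady condition clears.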
[node-status block repeats at 12:52:17.223, 12:52:17.325, 12:52:17.428, 12:52:17.530, 12:52:17.633, 12:52:17.735 and 12:52:17.838]
Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.887280 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-01 04:57:45.293916781 +0000 UTC
Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.933602 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:17 crc kubenswrapper[4703]: I0202 12:52:17.933729 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:17 crc kubenswrapper[4703]: E0202 12:52:17.933843 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:17 crc kubenswrapper[4703]: E0202 12:52:17.934636 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
[node-status block repeats at 12:52:17.940 and 12:52:18.043]
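The setters.go:603 entries serialize the node's Ready condition as JSON. The sketch below builds the same payload with the upstream API types, assuming the k8s.io/api and k8s.io/apimachinery modules are on the module path; it mirrors the shape of the logged condition rather than reproducing the kubelet's setter code.

    package main

    import (
    	"encoding/json"
    	"fmt"

    	corev1 "k8s.io/api/core/v1"
    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    )

    func main() {
    	now := metav1.Now()
    	cond := corev1.NodeCondition{
    		Type:               corev1.NodeReady,
    		Status:             corev1.ConditionFalse,
    		LastHeartbeatTime:  now,
    		LastTransitionTime: now,
    		Reason:             "KubeletNotReady",
    		// Message shortened here; the log carries the full CNI error text.
    		Message: "container runtime network not ready: NetworkReady=false",
    	}
    	b, _ := json.Marshal(cond)
    	// Prints the same shape as the condition={...} payload in the log.
    	fmt.Println(string(b))
    }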
[node-status block repeats at 12:52:18.146, 12:52:18.249, 12:52:18.351, 12:52:18.454, 12:52:18.557, 12:52:18.660, 12:52:18.762 and 12:52:18.864]
Feb 02 12:52:18 crc kubenswrapper[4703]: I0202 12:52:18.887470 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 00:14:31.046460786 +0000 UTC
Feb 02 12:52:18 crc kubenswrapper[4703]: I0202 12:52:18.932964 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:18 crc kubenswrapper[4703]: I0202 12:52:18.933051 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:18 crc kubenswrapper[4703]: E0202 12:52:18.933120 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:18 crc kubenswrapper[4703]: E0202 12:52:18.933173 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status block repeats at 12:52:18.967 and 12:52:19.069]
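The certificate_manager.go:356 lines print a fixed expiration but a different rotation deadline on every pass (2025-12-17, 2026-01-01, 2025-11-21, ...). That is consistent with client-go's certificate manager drawing a jittered deadline from the tail of the certificate's lifetime each time it runs; because the drawn deadline is already in the past, rotation is due and the draw repeats on each attempt. A sketch of that deadline draw follows, with notBefore an assumed issue time (the log only shows notAfter).

    package main

    import (
    	"fmt"
    	"math/rand"
    	"time"
    )

    // nextRotationDeadline draws a random point in the last tranche of the
    // certificate's lifetime, mirroring the jitter that client-go's
    // certificate manager applies (roughly notBefore + 70-90% of lifetime).
    func nextRotationDeadline(notBefore, notAfter time.Time) time.Time {
    	total := notAfter.Sub(notBefore)
    	jittered := time.Duration(float64(total) * (0.7 + 0.2*rand.Float64()))
    	return notBefore.Add(jittered)
    }

    func main() {
    	// notAfter comes from the log; notBefore is an assumed issue time.
    	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)
    	notBefore := notAfter.Add(-285 * 24 * time.Hour)
    	for i := 0; i < 3; i++ {
    		// A different deadline on each draw, as in the log lines above.
    		fmt.Println("rotation deadline is", nextRotationDeadline(notBefore, notAfter))
    	}
    }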
[node-status block repeats at 12:52:19.171, 12:52:19.274, 12:52:19.375, 12:52:19.477, 12:52:19.579, 12:52:19.681 and 12:52:19.784]
Feb 02 12:52:19 crc kubenswrapper[4703]: I0202 12:52:19.887561 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 03:26:51.99552825 +0000 UTC
[node-status block repeats at 12:52:19.887]
Feb 02 12:52:19 crc kubenswrapper[4703]: I0202 12:52:19.933260 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:19 crc kubenswrapper[4703]: I0202 12:52:19.933301 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:19 crc kubenswrapper[4703]: E0202 12:52:19.933432 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:19 crc kubenswrapper[4703]: E0202 12:52:19.933531 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
[node-status block repeats at 12:52:19.989 and 12:52:20.091]
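The per-pod "Error syncing pod, skipping" entries recur about once a second: pod workers fail fast while the network is down and leave the pod queued for the next sync. A purely illustrative sketch of that skip-and-retry shape is below; the names and loop structure are assumptions, not kubelet internals.

    package main

    import (
    	"errors"
    	"fmt"
    )

    var errNetworkNotReady = errors.New("network is not ready")

    // syncPod stands in for one sync attempt by a pod worker: pods that
    // need the cluster network fail fast while no CNI config exists.
    func syncPod(pod string, networkReady bool) error {
    	if !networkReady {
    		return fmt.Errorf("%w: container runtime network not ready", errNetworkNotReady)
    	}
    	return nil
    }

    func main() {
    	pods := []string{
    		"openshift-multus/network-metrics-daemon-n2htj",
    		"openshift-network-diagnostics/network-check-source-55646444c4-trplf",
    	}
    	// One tick of the sync loop: log, skip, and leave the pod queued.
    	// The next tick repeats the attempt, producing the once-per-second
    	// "Error syncing pod, skipping" pattern seen above.
    	for _, p := range pods {
    		if err := syncPod(p, false); err != nil {
    			fmt.Printf("Error syncing pod, skipping err=%q pod=%q\n", err.Error(), p)
    		}
    	}
    }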
[node-status block repeats at 12:52:20.193, 12:52:20.296, 12:52:20.398, 12:52:20.500, 12:52:20.602, 12:52:20.705 and 12:52:20.808]
Feb 02 12:52:20 crc kubenswrapper[4703]: I0202 12:52:20.888344 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-17 17:01:06.013496733 +0000 UTC
[node-status block repeats at 12:52:20.910]
Feb 02 12:52:20 crc kubenswrapper[4703]: I0202 12:52:20.933139 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:20 crc kubenswrapper[4703]: I0202 12:52:20.933162 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:20 crc kubenswrapper[4703]: E0202 12:52:20.933309 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:20 crc kubenswrapper[4703]: E0202 12:52:20.933398 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
[node-status block repeats at 12:52:21.013 and 12:52:21.115]
[node-status block repeats at 12:52:21.217, 12:52:21.319, 12:52:21.421, 12:52:21.523, 12:52:21.626, 12:52:21.729 and 12:52:21.833]
Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.888460 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-15 00:56:55.012676415 +0000 UTC
Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.917454 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:21 crc kubenswrapper[4703]: E0202 12:52:21.917593 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 12:52:21 crc kubenswrapper[4703]: E0202 12:52:21.917647 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:52:53.917628512 +0000 UTC m=+100.932836046 (durationBeforeRetry 32s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered
Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.933819 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.933840 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:21 crc kubenswrapper[4703]: E0202 12:52:21.934535 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:21 crc kubenswrapper[4703]: E0202 12:52:21.934647 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.934824 4703 scope.go:117] "RemoveContainer" containerID="538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4" Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.935259 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.935313 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.935324 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.935339 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:21 crc kubenswrapper[4703]: I0202 12:52:21.935351 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:21Z","lastTransitionTime":"2026-02-02T12:52:21Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:21 crc kubenswrapper[4703]: E0202 12:52:21.935386 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 20s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.037603 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.037651 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.037663 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.037681 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.037693 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.140686 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.140715 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.140725 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.140740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.140752 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.243226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.243329 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.243344 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.243363 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.243398 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.300863 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.301125 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.301207 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.301325 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.301436 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.313797 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:22Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.316517 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.316551 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.316564 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.316578 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.316588 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.327762 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:22Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.330831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.330870 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.330881 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.330898 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.330909 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.343491 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:22Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.348121 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.348162 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.348173 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.348189 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.348202 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.359554 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:22Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.362748 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.362918 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.362999 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.363101 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.363193 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.376105 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:22Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:22Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.376221 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.377805 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.377839 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.377852 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.377869 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.377880 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.480434 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.480471 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.480481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.480497 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.480508 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.582968 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.583015 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.583025 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.583039 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.583048 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.686129 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.686453 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.686544 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.686627 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.686695 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.789856 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.789907 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.789917 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.789931 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.789942 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.889827 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 14:50:52.222279575 +0000 UTC Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.892706 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.892739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.892748 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.892776 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.892796 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.933420 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.933513 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.933563 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:22 crc kubenswrapper[4703]: E0202 12:52:22.933645 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.995475 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.995516 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.995529 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.995547 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:22 crc kubenswrapper[4703]: I0202 12:52:22.995563 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:22Z","lastTransitionTime":"2026-02-02T12:52:22Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.097747 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.097798 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.097811 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.097831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.097842 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.200253 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.200302 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.200313 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.200328 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.200341 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.302904 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.302941 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.302952 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.302967 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.302979 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.405570 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.405637 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.405659 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.405685 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.405702 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.509068 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.509121 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.509140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.509160 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.509172 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.611843 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.612118 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.612182 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.612254 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.612353 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.715363 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.715628 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.715704 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.715785 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.715858 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.818989 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.819031 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.819040 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.819063 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.819073 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.890284 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-03 09:01:26.20504342 +0000 UTC Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.921939 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.922155 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.922258 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.922397 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.922483 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:23Z","lastTransitionTime":"2026-02-02T12:52:23Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.933430 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.933489 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:23 crc kubenswrapper[4703]: E0202 12:52:23.933568 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:23 crc kubenswrapper[4703]: E0202 12:52:23.933683 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.946404 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:23Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.965772 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:23Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.977758 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:23Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:23 crc kubenswrapper[4703]: I0202 12:52:23.990124 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:23Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.003024 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.012740 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.025092 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.025133 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.025144 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.025161 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.025176 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.028255 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.040235 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.052722 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.063638 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.075512 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":
\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.087232 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.099042 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.113465 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.127086 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.127112 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc 
kubenswrapper[4703]: I0202 12:52:24.127122 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.127137 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.127149 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.131467 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.142838 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.156084 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:24Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.229699 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.229742 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.229796 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.229813 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.229829 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.332301 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.332330 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.332338 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.332351 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.332360 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.434846 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.434913 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.434925 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.434940 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.434950 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.537529 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.538011 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.538104 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.538192 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.538331 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.640366 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.640405 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.640418 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.640434 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.640445 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.743097 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.743135 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.743145 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.743160 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.743173 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.845693 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.845724 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.845733 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.845747 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.845767 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.891380 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-10 08:52:13.830068102 +0000 UTC Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.932914 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.932928 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:24 crc kubenswrapper[4703]: E0202 12:52:24.933048 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:24 crc kubenswrapper[4703]: E0202 12:52:24.933161 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.950213 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.950264 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.950295 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.950321 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:24 crc kubenswrapper[4703]: I0202 12:52:24.950334 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:24Z","lastTransitionTime":"2026-02-02T12:52:24Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.053444 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.053483 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.053493 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.053509 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.053518 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.155408 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.155443 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.155454 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.155469 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.155480 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.259551 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.259615 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.259626 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.259641 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.259651 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.362062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.362097 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.362105 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.362145 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.362155 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.391120 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/0.log" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.391174 4703 generic.go:334] "Generic (PLEG): container finished" podID="5fe22056-9a8b-4eba-8776-c50531078e2f" containerID="eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb" exitCode=1 Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.391203 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerDied","Data":"eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.391551 4703 scope.go:117] "RemoveContainer" containerID="eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.406700 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.418338 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\
\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.430419 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod 
\"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.447208 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\
\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.462303 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"containers with unready status: [kube-multus]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"2026-02-02T12:51:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc\\\\n2026-02-02T12:51:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc to /host/opt/cni/bin/\\\\n2026-02-02T12:51:40Z [verbose] multus-daemon started\\\\n2026-02-02T12:51:40Z [verbose] Readiness Indicator file check\\\\n2026-02-02T12:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.465253 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.465291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.465302 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.465317 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.465330 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.475071 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"
cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.487731 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-re
lease-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.500310 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.511919 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.522512 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.537266 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5
db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io
/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mo
untPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.553492 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.563357 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.566851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.566883 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.566893 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.566910 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.566921 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.573675 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.583571 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.593633 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.601803 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:25Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.669769 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.669819 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.669829 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.669844 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.669855 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.772549 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.772821 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.772909 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.772987 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.773052 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.875668 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.875708 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.875718 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.875736 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.875746 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.892135 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-06 07:00:18.316882419 +0000 UTC
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.933537 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.933649 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:25 crc kubenswrapper[4703]: E0202 12:52:25.933714 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:25 crc kubenswrapper[4703]: E0202 12:52:25.933739 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.978075 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.978111 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.978120 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.978133 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:25 crc kubenswrapper[4703]: I0202 12:52:25.978142 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:25Z","lastTransitionTime":"2026-02-02T12:52:25Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.080631 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.080667 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.080677 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.080692 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.080702 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.183181 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.183214 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.183223 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.183236 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.183246 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.285990 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.286041 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.286054 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.286072 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.286084 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.388160 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.388208 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.388217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.388232 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.388240 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.395605 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/0.log" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.395659 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerStarted","Data":"9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.447882 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.462588 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.480856 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.490681 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.490728 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.490738 4703 kubelet_node_status.go:724] "Recording event message for node" 
node="crc" event="NodeHasSufficientPID" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.490757 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.490767 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.493290 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.507196 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" 
err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.518084 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.527863 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.540055 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.552602 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.565824 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.579524 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"2026-02-02T12:51:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc\\\\n2026-02-02T12:51:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc to /host/opt/cni/bin/\\\\n2026-02-02T12:51:40Z [verbose] multus-daemon started\\\\n2026-02-02T12:51:40Z [verbose] Readiness Indicator file check\\\\n2026-02-02T12:52:25Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.591297 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.593117 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.593247 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.593350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.593428 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.593489 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.602582 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath
\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.615903 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.630975 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.650779 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.664826 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:26Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.696350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.696404 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.696424 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.696450 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.696467 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.798902 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.798954 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.798964 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.798985 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.798996 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.892268 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-23 21:15:28.597321029 +0000 UTC Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.901965 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.902260 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.902389 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.902504 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.902593 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:26Z","lastTransitionTime":"2026-02-02T12:52:26Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.932945 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:26 crc kubenswrapper[4703]: I0202 12:52:26.933016 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:26 crc kubenswrapper[4703]: E0202 12:52:26.933164 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:26 crc kubenswrapper[4703]: E0202 12:52:26.933332 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.006040 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.006122 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.006140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.006550 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.006583 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.110579 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.110620 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.110630 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.110646 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.110656 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.213058 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.213091 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.213101 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.213116 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.213128 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.314766 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.314792 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.314800 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.314811 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.314820 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.417408 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.417445 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.417458 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.417474 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.417485 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.519575 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.519615 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.519627 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.519643 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.519659 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.622212 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.622256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.622283 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.622299 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.622309 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.724441 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.724481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.724491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.724505 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.724517 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.827335 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.827383 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.827395 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.827412 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.827423 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.892940 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-25 22:55:13.676614942 +0000 UTC Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.930341 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.930386 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.930395 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.930408 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.930417 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:27Z","lastTransitionTime":"2026-02-02T12:52:27Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.933037 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:27 crc kubenswrapper[4703]: I0202 12:52:27.933064 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:27 crc kubenswrapper[4703]: E0202 12:52:27.933314 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:27 crc kubenswrapper[4703]: E0202 12:52:27.933428 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.032729 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.032769 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.032778 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.032794 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.032805 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.134939 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.134978 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.134987 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.135001 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.135011 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.237419 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.237477 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.237488 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.237511 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.237529 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.340941 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.340992 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.341004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.341030 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.341052 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.443935 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.444073 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.444085 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.444107 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.444116 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.547516 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.547565 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.547577 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.547596 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.547611 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.650579 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.650623 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.650632 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.650647 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.650657 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.753804 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.753857 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.753870 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.753892 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.753905 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.857521 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.857572 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.857582 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.857602 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.857612 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.894067 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-02 10:30:13.914557628 +0000 UTC Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.933752 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.934040 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:28 crc kubenswrapper[4703]: E0202 12:52:28.934191 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:28 crc kubenswrapper[4703]: E0202 12:52:28.934409 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
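Every failure in this capture traces to one condition: the kubelet finds no CNI configuration file in /etc/kubernetes/cni/net.d/, so it reports NetworkReady=false and skips syncing any pod that needs the cluster network. Below is a minimal diagnostic sketch, not part of the log: it checks the directory named verbatim in the message above; the .conf/.conflist/.json suffix set and the remark about plain-Kubernetes nodes are assumptions.

    #!/usr/bin/env python3
    # Sketch: verify the CNI config directory the kubelet complains about.
    # An empty or missing directory is exactly the NetworkPluginNotReady case.
    from pathlib import Path

    # Path taken verbatim from the kubelet message above; plain-Kubernetes
    # nodes conventionally use /etc/cni/net.d instead (assumption).
    CNI_DIR = Path("/etc/kubernetes/cni/net.d")

    def cni_config_present(d: Path = CNI_DIR) -> bool:
        """True if at least one CNI config file (.conf/.conflist/.json) exists."""
        if not d.is_dir():
            return False
        return any(p.suffix in {".conf", ".conflist", ".json"} for p in d.iterdir())

    if __name__ == "__main__":
        state = "config found" if cni_config_present() else "no CNI configuration file"
        print(f"{CNI_DIR}: {state}")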
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.960263 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.960336 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.960354 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.960382 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:28 crc kubenswrapper[4703]: I0202 12:52:28.960397 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:28Z","lastTransitionTime":"2026-02-02T12:52:28Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.063851 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.063902 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.063919 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.063940 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.063953 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.167392 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.167445 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.167459 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.167481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.167496 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.269922 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.269965 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.269976 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.269994 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.270004 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.372669 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.372722 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.372734 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.372751 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.372763 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.475444 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.475483 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.475491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.475508 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.475518 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.578575 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.578675 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.578689 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.578716 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.578734 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.682068 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.682124 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.682135 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.682157 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.682171 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.784746 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.784808 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.784822 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.784842 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.784856 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.888182 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.888257 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.888292 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.888321 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.888337 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.894744 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-27 07:04:34.021118211 +0000 UTC Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.933735 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.933871 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:29 crc kubenswrapper[4703]: E0202 12:52:29.933962 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
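The status loop above repeats the same five records roughly every 100 ms: four "Recording event message for node" entries followed by one "Node became not ready" entry whose condition field is plain JSON. The parser sketch below (not part of the log) tallies those events and extracts the Ready condition; the regexes are written against the records above, and anything beyond that shape is an assumption.

    #!/usr/bin/env python3
    # Sketch: summarize the kubelet's node-status churn from a log like this one.
    import json
    import re
    import sys
    from collections import Counter

    EVENT_RE = re.compile(
        r'"Recording event message for node" node="(?P<node>[^"]+)" event="(?P<event>[^"]+)"')
    COND_RE = re.compile(
        r'"Node became not ready" node="[^"]+" condition=(?P<cond>\{.*\})')

    def summarize(lines):
        events = Counter()
        conditions = []
        for line in lines:
            if m := EVENT_RE.search(line):
                events[m.group("event")] += 1
            elif m := COND_RE.search(line):
                # The condition field is valid JSON as logged.
                conditions.append(json.loads(m.group("cond")))
        return events, conditions

    if __name__ == "__main__":
        events, conds = summarize(sys.stdin)
        print(dict(events))
        if conds:
            last = conds[-1]
            print("last Ready condition:", last["reason"], last["lastHeartbeatTime"])

Run on this capture it would show the four event types in lockstep counts and KubeletNotReady as the persistent reason, which is a quick way to confirm the node never left the NotReady state during the window.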
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:29 crc kubenswrapper[4703]: E0202 12:52:29.934122 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.991977 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.992043 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.992062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.992090 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:29 crc kubenswrapper[4703]: I0202 12:52:29.992109 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:29Z","lastTransitionTime":"2026-02-02T12:52:29Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.095242 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.095347 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.095363 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.095387 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.095400 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.198567 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.198606 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.198614 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.198628 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.198638 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.302177 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.302267 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.302302 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.302327 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.302344 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.405193 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.405239 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.405248 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.405298 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.405311 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.507687 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.507729 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.507740 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.507754 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.507765 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.611168 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.611256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.611314 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.611346 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.611367 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.714725 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.714795 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.714808 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.714833 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.714846 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.817841 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.817893 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.817907 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.817929 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.817944 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.895119 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-08 14:09:48.873998026 +0000 UTC Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.921004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.921042 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.921052 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.921064 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.921072 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:30Z","lastTransitionTime":"2026-02-02T12:52:30Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.933053 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:30 crc kubenswrapper[4703]: I0202 12:52:30.933190 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:30 crc kubenswrapper[4703]: E0202 12:52:30.933210 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:30 crc kubenswrapper[4703]: E0202 12:52:30.933482 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.024146 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.024264 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.024351 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.024387 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.024412 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.127168 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.127534 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.127663 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.127763 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.127850 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.230962 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.231024 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.231037 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.231054 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.231065 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.333885 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.333956 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.333975 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.334004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.334026 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.436144 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.436185 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.436194 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.436207 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.436215 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.538771 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.539138 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.539323 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.539487 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.539631 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.642650 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.642908 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.643011 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.643106 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.643185 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.745764 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.745805 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.745815 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.745831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.745844 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.848328 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.848614 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.848845 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.848940 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.849000 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.895287 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-30 11:41:46.459518302 +0000 UTC Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.932908 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.933039 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:31 crc kubenswrapper[4703]: E0202 12:52:31.933234 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:31 crc kubenswrapper[4703]: E0202 12:52:31.933360 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
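The certificate_manager.go:356 records above show the serving certificate expiring 2026-02-24 while the freshly computed rotation deadline lands in the past on every attempt (2026-01-02, 2025-12-27, 2025-12-08, 2025-11-30), so rotation is immediately due and is retried each loop. The scatter between attempts comes from jitter: the upstream Kubernetes certificate manager picks the deadline at a random point between 70% and 90% of the certificate's validity window (an assumption about the implementation; the log only shows the outcomes). A rough illustration, with a one-year validity assumed since the log shows only the expiry time:

    #!/usr/bin/env python3
    # Illustration only: why the logged rotation deadline moves around and
    # keeps falling before the log's clock (2026-02-02).
    import random
    from datetime import datetime, timedelta

    def rotation_deadline(not_before: datetime, not_after: datetime) -> datetime:
        """Pick a deadline uniformly in [70%, 90%) of the validity window."""
        validity = not_after - not_before
        jitter = 0.7 + 0.2 * random.random()
        return not_before + timedelta(seconds=validity.total_seconds() * jitter)

    # Expiry taken from the log; the one-year validity is an assumption.
    not_after = datetime(2026, 2, 24, 5, 53, 3)
    not_before = not_after - timedelta(days=365)
    for _ in range(3):
        print(rotation_deadline(not_before, not_after))

With these inputs every sampled deadline falls between roughly late October and early January, i.e. before the capture's timestamps, which matches the four deadlines logged above: rotation is already overdue, so the manager recomputes and retries on each sync.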
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.950999 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.951035 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.951044 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.951057 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:31 crc kubenswrapper[4703]: I0202 12:52:31.951069 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:31Z","lastTransitionTime":"2026-02-02T12:52:31Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.052917 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.052962 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.052975 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.052991 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.053006 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.155228 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.155289 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.155317 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.155335 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.155346 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.257753 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.257794 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.257805 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.257818 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.257828 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.360167 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.360200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.360209 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.360223 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.360232 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.385692 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.385729 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.385737 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.385752 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.385765 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.401814 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:32Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.406360 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.406421 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.406435 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.406451 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.406463 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.418761 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:32Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.422227 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.422264 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.422290 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.422305 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.422314 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.438605 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:32Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.442303 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.442340 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.442349 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.442363 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.442372 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.455923 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:32Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.459118 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.459146 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.459180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.459196 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.459207 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.472230 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:32Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"},\\\"runtimeHandlers\\\":[{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":false},\\\"name\\\":\\\"runc\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"crun\\\"},{\\\"features\\\":{\\\"recursiveReadOnlyMounts\\\":true,\\\"userNamespaces\\\":true},\\\"name\\\":\\\"\\\"}]}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:32Z is after 
2025-08-24T17:21:41Z" Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.472386 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.473901 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.473930 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.473940 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.473954 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.473970 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.576130 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.576217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.576237 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.576256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.576299 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.678594 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.678633 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.678643 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.678658 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.678669 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.780662 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.780701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.780710 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.780723 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.780733 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.882544 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.882583 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.882596 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.882622 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.882636 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.895849 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-04 00:35:28.785568663 +0000 UTC Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.933351 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.933451 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.933557 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:32 crc kubenswrapper[4703]: E0202 12:52:32.933732 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.985112 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.985154 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.985166 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.985184 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:32 crc kubenswrapper[4703]: I0202 12:52:32.985196 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:32Z","lastTransitionTime":"2026-02-02T12:52:32Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.087226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.087332 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.087358 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.087389 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.087412 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.190155 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.190193 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.190203 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.190217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.190229 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.292454 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.292488 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.292499 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.292512 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.292522 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.395490 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.395533 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.395544 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.395561 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.395574 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.498070 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.498116 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.498128 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.498146 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.498159 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.600825 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.600871 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.600881 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.600897 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.600907 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.703378 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.703421 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.703433 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.703449 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.703461 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.806343 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.806399 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.806411 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.806430 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.806445 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.896323 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-25 21:55:37.566378801 +0000 UTC Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.909062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.909126 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.909144 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.909171 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.909187 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:33Z","lastTransitionTime":"2026-02-02T12:52:33Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.933610 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:33 crc kubenswrapper[4703]: E0202 12:52:33.933714 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.933613 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:33 crc kubenswrapper[4703]: E0202 12:52:33.933908 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.947094 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:33Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.961803 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:33Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.978004 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:33Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:33 crc kubenswrapper[4703]: I0202 12:52:33.989960 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:33Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.002383 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.011475 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.011524 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.011533 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.011548 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.011557 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.017559 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.033082 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.050897 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.065138 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: 
[check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.078574 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.091575 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.105615 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"2026-02-02T12:51:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc\\\\n2026-02-02T12:51:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc to /host/opt/cni/bin/\\\\n2026-02-02T12:51:40Z [verbose] multus-daemon started\\\\n2026-02-02T12:51:40Z [verbose] Readiness Indicator file check\\\\n2026-02-02T12:52:25Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.116946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.116986 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.117001 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.117026 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.117040 4703 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.119521 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controlle
r\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.131982 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.146660 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33
488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-b
inary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"termin
ated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.174211 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: 
[ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.
0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14a
e9c02cf6a2a05d54a8e856a4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":2,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"message\\\":\\\"back-off 20s restarting failed container=ovnkube-controller 
pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\\\",\\\"reason\\\":\\\"CrashLoopBackOff\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-
dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.188837 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7
\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:34Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.220082 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.220200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.220212 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.220226 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.220236 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.322837 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.322885 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.322894 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.322911 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.322936 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.424327 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.424390 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.424403 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.424423 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.424437 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.527829 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.527879 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.527890 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.527911 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.527926 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.632145 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.632184 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.632196 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.632216 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.632226 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.734556 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.734614 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.734623 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.734639 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.734648 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.836795 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.836833 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.836840 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.836854 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.836864 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.897364 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-19 21:22:33.402710143 +0000 UTC
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.933024 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.933079 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:34 crc kubenswrapper[4703]: E0202 12:52:34.933173 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:34 crc kubenswrapper[4703]: E0202 12:52:34.933312 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.938965 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.939014 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.939027 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.939044 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:34 crc kubenswrapper[4703]: I0202 12:52:34.939055 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:34Z","lastTransitionTime":"2026-02-02T12:52:34Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.042927 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.042988 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.043004 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.043025 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.043068 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.146119 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.146184 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.146197 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.146220 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.146241 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.248880 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.248941 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.248969 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.251263 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.251310 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.354070 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.354101 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.354112 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.354124 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.354133 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.455919 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.455997 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.456015 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.456039 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.456056 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.559018 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.559073 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.559090 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.559112 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.559129 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.661847 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.661912 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.661936 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.661981 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.662005 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.765000 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.765038 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.765049 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.765065 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.765077 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.866947 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.866988 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.866999 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.867014 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.867028 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.897615 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 00:58:05.812727266 +0000 UTC
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.933302 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.933331 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:35 crc kubenswrapper[4703]: E0202 12:52:35.933484 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:35 crc kubenswrapper[4703]: E0202 12:52:35.933562 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.934143 4703 scope.go:117] "RemoveContainer" containerID="538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.969988 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.970051 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.970062 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.970076 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:35 crc kubenswrapper[4703]: I0202 12:52:35.970085 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:35Z","lastTransitionTime":"2026-02-02T12:52:35Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.072767 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.072826 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.072844 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.072868 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.072886 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.175079 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.175115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.175123 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.175137 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.175146 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.277488 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.277539 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.277548 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.277563 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.277576 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.379778 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.379829 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.379840 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.379859 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.379872 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.482491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.482536 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.482548 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.482564 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.482577 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.585230 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.585258 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.585288 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.585307 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.585321 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.687664 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.687711 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.687723 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.687739 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.687752 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.789689 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.789722 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.789734 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.789750 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.789764 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.891768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.891798 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.891807 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.891820 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.891830 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.897973 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 19:54:52.704213353 +0000 UTC
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.933514 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.933499 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:36 crc kubenswrapper[4703]: E0202 12:52:36.933640 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:52:36 crc kubenswrapper[4703]: E0202 12:52:36.933705 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.994591 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.994632 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.994641 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.994656 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:36 crc kubenswrapper[4703]: I0202 12:52:36.994665 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:36Z","lastTransitionTime":"2026-02-02T12:52:36Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.096448 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.096479 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.096491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.096506 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.096544 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.198980 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.199012 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.199021 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.199041 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.199052 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.301705 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.301756 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.301765 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.301795 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.301805 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"}
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.403486 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.403524 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.403534 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.403548 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.403557 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.426340 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/3.log"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.427051 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/2.log"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.429517 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6" exitCode=1
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.429590 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6"}
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.429708 4703 scope.go:117] "RemoveContainer" containerID="538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.430286 4703 scope.go:117] "RemoveContainer" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6"
Feb 02 12:52:37 crc kubenswrapper[4703]: E0202 12:52:37.430521 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e"
Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.443091 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.456479 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.466477 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon 
kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.477602 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"imag
e\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.489539 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"
lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.503420 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-opera
tor@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not 
found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.505719 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.505755 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.505764 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.505778 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.505788 4703 setters.go:603] "Node became not ready" node="crc" 
condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.516328 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.527981 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.538801 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae3
4a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.551659 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"2026-02-02T12:51:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc\\\\n2026-02-02T12:51:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc to /host/opt/cni/bin/\\\\n2026-02-02T12:51:40Z [verbose] multus-daemon started\\\\n2026-02-02T12:51:40Z [verbose] Readiness Indicator file check\\\\n2026-02-02T12:52:25Z [error] have you checked that your default network is ready? 
still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.568389 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:37Z\\\",\\\"message\\\":\\\"e column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 12:52:37.179414 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0202 12:52:37.179446 6825 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/package-server-manager-metrics]} name:Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:37.179483 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.577586 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 
12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.590967 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-rele
ase-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"
name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.603390 4703 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.607513 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.607548 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.607562 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.607576 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.607587 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.614963 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.624731 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.634173 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:37Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.709674 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.709717 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.709727 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.709742 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.709755 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.812664 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.812704 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.812713 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.812730 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.812741 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.898218 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-01 05:08:32.44959293 +0000 UTC Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.915185 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.915221 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.915231 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.915246 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.915256 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:37Z","lastTransitionTime":"2026-02-02T12:52:37Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.933860 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:37 crc kubenswrapper[4703]: I0202 12:52:37.933900 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:37 crc kubenswrapper[4703]: E0202 12:52:37.934024 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:37 crc kubenswrapper[4703]: E0202 12:52:37.934100 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.018200 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.018233 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.018242 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.018255 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.018265 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.120608 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.120806 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.120817 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.120831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.120842 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.223415 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.223468 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.223481 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.223503 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.223517 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.326338 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.326373 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.326382 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.326398 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.326409 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.428701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.428743 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.428753 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.428773 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.428784 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.433503 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/3.log" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.532132 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.532182 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.532195 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.532209 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.532222 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.634531 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.634564 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.634574 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.634587 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.634597 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.737783 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.737850 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.737863 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.737910 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.737927 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.841264 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.841328 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.841339 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.841356 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.841367 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.898627 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-22 05:55:24.203852982 +0000 UTC Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.933311 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.933391 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:38 crc kubenswrapper[4703]: E0202 12:52:38.933459 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:38 crc kubenswrapper[4703]: E0202 12:52:38.933580 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.944638 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.944694 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.944707 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.944729 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.945166 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:38Z","lastTransitionTime":"2026-02-02T12:52:38Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:38 crc kubenswrapper[4703]: I0202 12:52:38.949443 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd/etcd-crc"] Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.048042 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.048099 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.048114 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.048137 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.048153 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.151314 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.151368 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.151381 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.151406 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.151420 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.254892 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.254930 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.254944 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.254962 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.254976 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.357244 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.357315 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.357329 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.357346 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.357359 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.459158 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.459463 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.459531 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.459640 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.459724 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.562539 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.562641 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.562657 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.562674 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.562687 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.665178 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.665307 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.665338 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.665366 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.665383 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.767580 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.767862 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.767951 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.768023 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.768083 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.806076 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.806209 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.806237 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.806256 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.806333 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806400 4703 configmap.go:193] Couldn't get configMap openshift-network-console/networking-console-plugin: object 
"openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806446 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.806426303 +0000 UTC m=+150.821633837 (durationBeforeRetry 1m4s). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806455 4703 secret.go:188] Couldn't get secret openshift-network-console/networking-console-plugin-cert: object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806407 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806483 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/kube-root-ca.crt: object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806509 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806514 4703 projected.go:288] Couldn't get configMap openshift-network-diagnostics/openshift-service-ca.crt: object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806524 4703 projected.go:194] Error preparing data for projected volume kube-api-access-s2dwl for pod openshift-network-diagnostics/network-check-source-55646444c4-trplf: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806535 4703 projected.go:194] Error preparing data for projected volume kube-api-access-cqllr for pod openshift-network-diagnostics/network-check-target-xd92c: [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806489 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.806459623 +0000 UTC m=+150.821667157 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "nginx-conf" (UniqueName: "kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806581 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert podName:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.806562586 +0000 UTC m=+150.821770190 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "networking-console-plugin-cert" (UniqueName: "kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert") pod "networking-console-plugin-85b44fc459-gdk6g" (UID: "5fe485a1-e14f-4c09-b5b9-f252bc42b7e8") : object "openshift-network-console"/"networking-console-plugin-cert" not registered Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806599 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl podName:9d751cbb-f2e2-430d-9754-c882a5e924a5 nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.806591007 +0000 UTC m=+150.821798661 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "kube-api-access-s2dwl" (UniqueName: "kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl") pod "network-check-source-55646444c4-trplf" (UID: "9d751cbb-f2e2-430d-9754-c882a5e924a5") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.806611 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr podName:3b6479f0-333b-4a96-9adf-2099afdc2447 nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.806605807 +0000 UTC m=+150.821813471 (durationBeforeRetry 1m4s). 
Error: MountVolume.SetUp failed for volume "kube-api-access-cqllr" (UniqueName: "kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr") pod "network-check-target-xd92c" (UID: "3b6479f0-333b-4a96-9adf-2099afdc2447") : [object "openshift-network-diagnostics"/"kube-root-ca.crt" not registered, object "openshift-network-diagnostics"/"openshift-service-ca.crt" not registered] Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.870077 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.870302 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.870364 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.870458 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.870526 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.899331 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-15 09:36:47.657883251 +0000 UTC Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.933135 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.933285 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.933459 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:39 crc kubenswrapper[4703]: E0202 12:52:39.933516 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.972108 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.972146 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.972156 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.972178 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:39 crc kubenswrapper[4703]: I0202 12:52:39.972191 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:39Z","lastTransitionTime":"2026-02-02T12:52:39Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.073922 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.073959 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.073967 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.073981 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.073992 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.176461 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.176488 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.176498 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.176512 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.176521 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.279206 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.279237 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.279248 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.279263 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.279288 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.381426 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.381477 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.381486 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.381499 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.381508 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.484625 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.484692 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.484706 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.484728 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.484740 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.587561 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.587599 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.587607 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.587621 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.587630 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.690497 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.690539 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.690549 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.690564 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.690573 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.793563 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.793600 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.793611 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.793625 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.793634 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.896562 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.896606 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.896619 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.896636 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.896647 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.899735 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-15 13:07:51.05279442 +0000 UTC Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.933630 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.933646 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:40 crc kubenswrapper[4703]: E0202 12:52:40.933752 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:40 crc kubenswrapper[4703]: E0202 12:52:40.933902 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.999569 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.999617 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.999627 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.999644 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:40 crc kubenswrapper[4703]: I0202 12:52:40.999653 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:40Z","lastTransitionTime":"2026-02-02T12:52:40Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.101701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.101746 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.101756 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.101791 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.101804 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.203910 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.203943 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.203954 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.203971 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.203984 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.306009 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.306048 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.306056 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.306070 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.306078 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.408768 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.408857 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.408893 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.408970 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.409005 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.512075 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.512119 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.512132 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.512149 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.512162 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.614016 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.614048 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.614058 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.614073 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.614084 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.717028 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.717071 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.717080 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.717093 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.717103 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.819306 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.819353 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.819365 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.819382 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.819394 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.900138 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-21 09:22:07.50291862 +0000 UTC Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.921434 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.921508 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.921521 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.921551 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.921570 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:41Z","lastTransitionTime":"2026-02-02T12:52:41Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.933187 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:41 crc kubenswrapper[4703]: E0202 12:52:41.933414 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:41 crc kubenswrapper[4703]: I0202 12:52:41.933208 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:41 crc kubenswrapper[4703]: E0202 12:52:41.933603 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.024096 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.024134 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.024144 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.024159 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.024170 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.126231 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.126291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.126303 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.126319 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.126328 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.228685 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.228751 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.228780 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.228795 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.228806 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.331093 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.331140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.331149 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.331175 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.331187 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.433446 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.433491 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.433501 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.433515 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.433526 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.495444 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.495486 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.495496 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.495511 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.495523 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.508945 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.512435 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.512474 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.512484 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.512497 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.512506 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.524367 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.527643 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.527679 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
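Both failed attempts above, and the retries that follow, die at the same point: the node.network-node-identity.openshift.io webhook on 127.0.0.1:9743 presents a serving certificate that expired at 2025-08-24T17:21:41Z, while the node clock reads 2026-02-02. Below is a minimal Go sketch for reading that certificate's validity window directly, independent of the kubelet; the assumptions are that it runs on the node itself and that the port from the log is still listening.

package main

import (
	"crypto/tls"
	"fmt"
	"log"
	"time"
)

func main() {
	// Endpoint taken from the webhook error in the log above.
	addr := "127.0.0.1:9743"
	// InsecureSkipVerify is deliberate: verification is exactly what
	// fails here, and we only want to read the cert, not trust it.
	conn, err := tls.Dial("tcp", addr, &tls.Config{InsecureSkipVerify: true})
	if err != nil {
		log.Fatalf("dial %s: %v", addr, err)
	}
	defer conn.Close()

	cert := conn.ConnectionState().PeerCertificates[0]
	fmt.Printf("subject:    %s\n", cert.Subject)
	fmt.Printf("not before: %s\n", cert.NotBefore.Format(time.RFC3339))
	fmt.Printf("not after:  %s\n", cert.NotAfter.Format(time.RFC3339))
	fmt.Printf("expired:    %v\n", time.Now().After(cert.NotAfter))
}

Until that certificate is rotated (or the node clock is correct), every status patch will fail the same way, which is why the identical error keeps repeating below.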
event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.527688 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.527701 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.527711 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.539964 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.543670 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.543747 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.543759 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.543775 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.543785 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.555844 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.559628 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.559679 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.559694 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.559714 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.559729 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.573883 4703 kubelet_node_status.go:585] "Error updating node status, will retry" err="failed to patch status \"{\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"type\\\":\\\"DiskPressure\\\"},{\\\"type\\\":\\\"PIDPressure\\\"},{\\\"type\\\":\\\"Ready\\\"}],\\\"allocatable\\\":{\\\"cpu\\\":\\\"11800m\\\",\\\"ephemeral-storage\\\":\\\"76396645454\\\",\\\"memory\\\":\\\"32404568Ki\\\"},\\\"capacity\\\":{\\\"cpu\\\":\\\"12\\\",\\\"ephemeral-storage\\\":\\\"83293888Ki\\\",\\\"memory\\\":\\\"32865368Ki\\\"},\\\"conditions\\\":[{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient memory available\\\",\\\"reason\\\":\\\"KubeletHasSufficientMemory\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"MemoryPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has no disk pressure\\\",\\\"reason\\\":\\\"KubeletHasNoDiskPressure\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"DiskPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"kubelet has sufficient PID available\\\",\\\"reason\\\":\\\"KubeletHasSufficientPID\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PIDPressure\\\"},{\\\"lastHeartbeatTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:42Z\\\",\\\"message\\\":\\\"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?\\\",\\\"reason\\\":\\\"KubeletNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"}],\\\"images\\\":[{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b9ea248f8ca33258fe1683da51d2b16b94630be1b361c65f68a16c1a34b94887\\\"],\\\"sizeBytes\\\":2887430265},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:4a62fa1c0091f6d94e8fb7258470b9a532d78364b6b51a05341592041d598562\\\",\\\"registry.redhat.io/redhat/redhat-operator-index@sha256:8db792bab418e30d9b71b9e1ac330ad036025257abbd2cd32f318ed14f70d6ac\\\",\\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1523204510},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\"],\\\"sizeBytes\\\":1498102846},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\"],\\\"sizeBytes\\\":1232839934},{\\\"names\\\":[\\\"registry.redhat.io/redhat/community-operator-index@sha256:8ff55cdb2367f5011074d2f5ebdc153b8885e7495e14ae00f99d2b7ab3584ade\\\",\\\"registry.redhat.io/redhat/community-operator-index@sha256:d656c1453f2261d9b800f5c69fba3bc2ffdb388414c4c0e89fcbaa067d7614c4\\\",\\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1151049424},{\\\"names\\\":[\\\"registry.redhat.io/redhat/certified-operator-index@sha256:1d7d4739b2001bd173f2632d5f73724a5034237ee2d93a02a21bbfff547002ba\\\",\\\"registry.redhat.io/redhat/certified-operator-index@sha256:7688bce5eb0d153adff87fc9f7a47642465c0b88208efb236880197969931b37\\\",\\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"],\\\"sizeBytes\\\":1032059094},{\\\"names\\\":[\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:0878ac12c537fcfc617a539b3b8bd329ba568bb49c6e3bb47827b177c47ae669\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index@sha256:1dc15c170ebf462dacaef75511740ed94ca1da210f3980f66d77f91ba201c875\\\",\\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"],\\\"sizeBytes\\\":1001152198},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\"],\\\"sizeBytes\\\":964552795},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\"],\\\"sizeBytes\\\":947616130},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c3cc3840d7a81ce1b420f06e07a923861faf37d9c10688aa3aa0b7b76c8706ad\\\"],\\\"sizeBytes\\\":907837715},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:101f295e2eae0755ae1865f7de885db1f17b9368e4120a713bb5f79e17ce8f93\\\"],\\\"sizeBytes\\\":854694423},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:47b0670fa1051335fd2d2c9e8361e4ed77c7760c33a2180b136f7c7f59863ec2\\\"],\\\"sizeBytes\\\":852490370},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:862f4a4bed52f372056b6d368e2498ebfb063075b31cf48dbdaaeedfcf0396cb\\\"],\\\"sizeBytes\\\":772592048},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\"],\\\"sizeBytes\\\":705793115},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\"],\\\"sizeBytes\\\":687915987},{\\\"names
\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f247257b0885cf5d303e3612c7714b33ae51404cfa2429822060c6c025eb17dd\\\"],\\\"sizeBytes\\\":668060419},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\"],\\\"sizeBytes\\\":613826183},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e3e9dc0b02b9351edf7c46b1d46d724abd1ac38ecbd6bc541cee84a209258d8\\\"],\\\"sizeBytes\\\":581863411},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\"],\\\"sizeBytes\\\":574606365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ee8d8f089ec1488067444c7e276c4e47cc93840280f3b3295484d67af2232002\\\"],\\\"sizeBytes\\\":550676059},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:10f20a39f16ae3019c62261eda8beb9e4d8c36cbb7b500b3bae1312987f0685d\\\"],\\\"sizeBytes\\\":541458174},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\"],\\\"sizeBytes\\\":533092226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\"],\\\"sizeBytes\\\":528023732},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\"],\\\"sizeBytes\\\":510867594},{\\\"names\\\":[\\\"quay.io/crcont/ocp-release@sha256:0b6ae0d091d2bf49f9b3a3aff54aabdc49e70c783780f118789f49d8f95a9e03\\\"],\\\"sizeBytes\\\":510526836},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\"],\\\"sizeBytes\\\":507459597},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7e9e7dd2b1a8394b7490ca6df8a3ee8cdfc6193ecc6fb6173ed9a1868116a207\\\"],\\\"sizeBytes\\\":505721947},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:094bb6a6641b4edbaf932f0551bcda20b0d4e012cbe84207348b24eeabd351e9\\\"],\\\"sizeBytes\\\":504778226},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c69fe7a98a744b7a7b61b2a8db81a338f373cd2b1d46c6d3f02864b30c37e46c\\\"],\\\"sizeBytes\\\":504735878},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e51e6f78ec20ef91c82e94a49f950e427e77894e582dcc406eec4df807ddd76e\\\"],\\\"sizeBytes\\\":502943148},{\\\"names\\\":[\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\"],\\\"sizeBytes\\\":501379880},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:3a741253807c962189819d879b8fef94a9452fb3f5f3969ec3207eb2d9862205\\\"],\\\"sizeBytes\\\":500472212},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\"],\\\"sizeBytes\\\":498888951},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5aa9e5379bfeb63f4e517fb45168eb6820138041641bbdfc6f4db6427032fa37\\\"],\\\"sizeBytes\\\":497832828},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\"],\\\"sizeBytes\\\":497742284},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:88b1f0a05a1b1c
91e1212b40f0e7d04c9351ec9d34c52097bfdc5897b46f2f0e\\\"],\\\"sizeBytes\\\":497120598},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:737e9019a072c74321e0a909ca95481f5c545044dd4f151a34d0e1c8b9cf273f\\\"],\\\"sizeBytes\\\":488494681},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:fe009d03910e18795e3bd60a3fd84938311d464d2730a2af5ded5b24e4d05a6b\\\"],\\\"sizeBytes\\\":487097366},{\\\"names\\\":[\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:66760a53b64d381940757ca9f0d05f523a61f943f8da03ce9791e5d05264a736\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner@sha256:e97a0cb5b6119a9735efe0ac24630a8912fcad89a1dddfa76dc10edac4ec9815\\\",\\\"registry.redhat.io/openshift4/ose-csi-external-provisioner:latest\\\"],\\\"sizeBytes\\\":485998616},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\"],\\\"sizeBytes\\\":485767738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:898cae57123c5006d397b24af21b0f24a0c42c9b0be5ee8251e1824711f65820\\\"],\\\"sizeBytes\\\":485535312},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:1eda5ad6a6c5b9cd94b4b456e9116f4a0517241b614de1a99df14baee20c3e6a\\\"],\\\"sizeBytes\\\":479585218},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:487c0a8d5200bcdce484ab1169229d8fcb8e91a934be45afff7819c4f7612f57\\\"],\\\"sizeBytes\\\":476681373},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:b641ed0d63034b23d07eb0b2cd455390e83b186e77375e2d3f37633c1ddb0495\\\"],\\\"sizeBytes\\\":473958144},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:32f9e10dfb8a7c812ea8b3e71a42bed9cef05305be18cc368b666df4643ba717\\\"],\\\"sizeBytes\\\":463179365},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:8fdf28927b06a42ea8af3985d558c84d9efd142bb32d3892c4fa9f5e0d98133c\\\"],\\\"sizeBytes\\\":460774792},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:dd0628f89ad843d82d5abfdc543ffab6a861a23cc3005909bd88fa7383b71113\\\"],\\\"sizeBytes\\\":459737917},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\"],\\\"sizeBytes\\\":457588564},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:adabc3456bf4f799f893d792cdf9e8cbc735b070be346552bcc99f741b0a83aa\\\"],\\\"sizeBytes\\\":450637738},{\\\"names\\\":[\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:342dca43b5b09123737ccda5e41b4a5d564e54333d8ce04d867d3fb968600317\\\"],\\\"sizeBytes\\\":448887027}],\\\"nodeInfo\\\":{\\\"bootID\\\":\\\"1293eb9d-82ee-4ca6-9a67-93a06ad7a634\\\",\\\"systemUUID\\\":\\\"50e23aaa-7ae0-4b56-bf68-da927f666ae9\\\"}}}\" for node \"crc\": Internal error occurred: failed calling webhook \"node.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/node?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:42Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.574021 4703 kubelet_node_status.go:572] "Unable to update node status" err="update node status exceeds retry count" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.575540 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" 
event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.575566 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.575575 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.575589 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.575598 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.677704 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.677848 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.677859 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.677874 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.677885 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.781158 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.781199 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.781215 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.781231 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.781242 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.883623 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.883664 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.883679 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.883714 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.883726 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.900855 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-13 06:08:58.181924696 +0000 UTC Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.933015 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.933015 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.933130 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:42 crc kubenswrapper[4703]: E0202 12:52:42.933198 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.986418 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.986486 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.986507 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.986536 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:42 crc kubenswrapper[4703]: I0202 12:52:42.986557 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:42Z","lastTransitionTime":"2026-02-02T12:52:42Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.088415 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.088456 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.088467 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.088482 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.088493 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.191202 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.191254 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.191291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.191315 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.191329 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.293946 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.293986 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.294007 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.294026 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.294039 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.396212 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.396255 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.396265 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.396303 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.396313 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.497970 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.498041 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.498059 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.498087 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.498105 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.600377 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.600422 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.600434 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.600452 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.600464 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.702217 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.702262 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.702292 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.702311 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.702328 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.804477 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.804511 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.804520 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.804532 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.804542 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.901336 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 03:08:32.780104016 +0000 UTC Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.907120 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.907161 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.907181 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.907196 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.907206 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:43Z","lastTransitionTime":"2026-02-02T12:52:43Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.933373 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.933478 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:43 crc kubenswrapper[4703]: E0202 12:52:43.934020 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:43 crc kubenswrapper[4703]: E0202 12:52:43.934038 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.949529 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/network-operator-58b4c7f79c-55gtf" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"37a5e44f-9a88-4405-be8a-b645485e7312\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f608e5ab98efefac550522b7310bf437b40c3c4ad28869705564c867e844681e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-operator\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"host-etc-kube\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/serving-cert\\\",\\\"name\\\":\\\"metrics-tls\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rdwmf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"network-operator-58b4c7f79c-55gtf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.963526 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-operator/iptables-alerter-4ln5h" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"d75a4c96-2883-4a0b-bab2-0fab2b6c0b49\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:39Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0eb121a79143a22d0df4fac6e00ecfed4a04aaf7262c404f33985f9bd415d108\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"iptables-alerter\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/iptables-alerter\\\",\\\"name\\\":\\\"iptables-alerter-script\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-rczfb\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-operator\"/\"iptables-alerter-4ln5h\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.982790 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-node-identity/network-node-identity-vrzqb" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"ef543e1b-8068-4ea3-b32a-61027b32e95d\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9202dd3d8a65f65fbbf4098db03682dbed3874a50baa4c137e501270fd0ba5c6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"approver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://1cc80cfeb4cf4db8b8335606acf66766549b277380b99fbb02fd44a71f99df33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"webhook\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/webhook-cert/\\\",\\\"name\\\":\\\"webhook-cert\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/ovnkube-identity-config\\\",\\\"name\\\":\\\"ovnkube-identity-cm\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2kz5\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}]}}\" for pod \"openshift-network-node-identity\"/\"network-node-identity-vrzqb\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:43 crc kubenswrapper[4703]: I0202 12:52:43.996004 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-dns/node-resolver-279cn" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"66fba6d8-0a3d-4fbe-ab6f-80a2cc2a05d8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://db752aa3fccdb7eafa7450308f33955a0c88da1da38c69bfc4e3ceea8864f6d4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:35512335ac39aed0f55b7f799f416f4f6445c20c1b19888cf2bb72bb276703f2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"dns-node-resolver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/hosts\\\",\\\"name\\\":\\\"hosts-file\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-k6txp\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-dns\"/\"node-resolver-279cn\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:43Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.010826 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.010879 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.010897 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.010927 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.010949 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: 
no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.022874 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-etcd/etcd-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"b46d550d-5695-45b4-8521-a4044b2ce8eb\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:29Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://69264b9130879a7e1d9a6a12ec2a1f9210ba1d25e19dffa05ec70aec9bd35a9a\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://d9ed6afad2db01385c2a5a3ec60528543897e13a9e81a65436b3e660d6038a8e\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://7dda63a16d941b1b01e17af6302247cd80145fff62a308f6c6d36cf3e953ebb2\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-0
2T12:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd/\\\",\\\"name\\\":\\\"log-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://61cf67f8cfcf7b14af774a7ae02fea74b7a13dd5a0c5f4c02b916ffe181444d0\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:a0fa3723269019bee1847b26702f42928e779036cc2f58408f8ee7866be30a93\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-rev\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:19Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/lib/etcd\\\",\\\"name\\\":\\\"data-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://01af4039ed36e4babdbe2982d0acf73eab09079b5678273e490d3dc73f6db562\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcdctl\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:18Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/manifests\\\",\\\"name\\\":\\\"static-pod-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/lib/etcd/\\\",\\\"name\\\":\\\"data-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://8c37335c5a7e3161e13c7faaf991ea0e954f790d5dadd2a054c565bb1cc2ddbf\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://8c37335c5a7e3161e13c7faaf991ea0e954f790d5dadd2a054c565bb1cc2ddbf\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/etcd\\\",\\\"name\\\":\\\"log-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://1e7c83c32acd3792c8e0ba3694aef53d312b50f075ed59918bcc3019e566f4b5\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-ensure-env-vars\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://1e7c83c32acd3792c8e0ba3694aef53d312b50f075e
d59918bcc3019e566f4b5\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:16Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}}},{\\\"containerID\\\":\\\"cri-o://c998bcb529632b8579f505d585c0fecc15b89bb42ba32ca3e2076447229abc98\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:07b7c6877441ecd6a5646fb68e33e9be8b90092272e49117b54b4a67314731ca\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"etcd-resources-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://c998bcb529632b8579f505d585c0fecc15b89bb42ba32ca3e2076447229abc98\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/usr/local/bin\\\",\\\"name\\\":\\\"usr-local-bin\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-etcd\"/\"etcd-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.039174 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"3b6479f0-333b-4a96-9adf-2099afdc2447\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [network-check-target-container]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. 
The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"network-check-target-container\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-cqllr\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-target-xd92c\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.052607 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-image-registry/node-ca-qqhwl" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5deea29f-df1d-4530-b29b-f50e5f40edeb\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0e1970817c95df4d19726276d124fb293a1569212ea73a5498bf47a4a485ec33\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:9fa29d188c85a8b1e1bd15c9c18e96f1b235da9bd4a45dbc086a4a69520ed63f\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"node-ca\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/tmp/serviceca\\\",\\\"name\\\":\\\"serviceca\\\"},{\\\"mountPath\\\":\\\"/etc/docker/certs.d\\\",\\\"name\\\":\\\"host\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-d96wm\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:38Z\\\"}}\" for pod \"openshift-image-registry\"/\"node-ca-qqhwl\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify 
certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.064179 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/network-metrics-daemon-n2htj" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"message\\\":\\\"containers with unready status: [network-metrics-daemon kube-rbac-proxy]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/metrics\\\",\\\"name\\\":\\\"metrics-certs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:d98bb346a17feae024d92663df92b25c120938395ab7043afbed543c6db9ca8d\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"network-metrics-daemon\\\",\\\"ready\\\":false,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-qxsp8\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-multus\"/\"network-metrics-daemon-n2htj\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.076324 4703 status_manager.go:875] "Failed to update status for pod" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [networking-console-plugin]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:ae647598ec35cda5766806d3d44a91e3b9d4dee48ff154f3d8490165399873fd\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"networking-console-plugin\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/cert\\\",\\\"name\\\":\\\"networking-console-plugin-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/nginx/nginx.conf\\\",\\\"name\\\":\\\"nginx-conf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-console\"/\"networking-console-plugin-85b44fc459-gdk6g\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.090287 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"c18a759f-5f28-4f90-866f-8f90476ba69c\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://f8632aa97c6934ac9cb2b0c3cf93908a44ee72915c29b28e05ba1515d6f63345\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/tls/private\\\",\\\"name\\\":\\\"proxy-tls\\\"},{\\\"mountPath\\\":\\\"/etc/kube-rbac-proxy\\\",\\\"name\\\":\\\"mcd-auth-proxy-config\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c915fb8ba96e911699a1ae34a8e95ca8a9fbe1bf8c28fea177225c63a8bdfc0a\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"machine-config-daemon\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/rootfs\\\",\\\"name\\\":\\\"rootfs\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-9xbrf\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:35Z\\\"}}\" for pod \"openshift-machine-config-operator\"/\"machine-config-daemon-2vnzs\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.104433 4703 status_manager.go:875] "Failed to 
update status for pod" pod="openshift-multus/multus-8vjml" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"5fe22056-9a8b-4eba-8776-c50531078e2f\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:26Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:25Z\\\",\\\"message\\\":\\\"2026-02-02T12:51:39+00:00 [cnibincopy] Successfully copied files in /usr/src/multus-cni/rhel9/bin/ to /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc\\\\n2026-02-02T12:51:39+00:00 [cnibincopy] Successfully moved files in /host/opt/cni/bin/upgrade_438387ec-6c8e-44c2-8014-5723401b93fc to /host/opt/cni/bin/\\\\n2026-02-02T12:51:40Z [verbose] multus-daemon started\\\\n2026-02-02T12:51:40Z [verbose] Readiness Indicator file check\\\\n2026-02-02T12:52:25Z [error] have you checked that your default network is ready? still waiting for readinessindicatorfile @ /host/run/multus/cni/net.d/10-ovn-kubernetes.conf. 
pollimmediate error: timed out waiting for the condition\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"name\\\":\\\"kube-multus\\\",\\\"ready\\\":true,\\\"restartCount\\\":1,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:52:25Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/run/multus/cni/net.d\\\",\\\"name\\\":\\\"multus-cni-dir\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/run/multus\\\",\\\"name\\\":\\\"multus-socket-dir-parent\\\"},{\\\"mountPath\\\":\\\"/run/k8s.cni.cncf.io\\\",\\\"name\\\":\\\"host-run-k8s-cni-cncf-io\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/bin\\\",\\\"name\\\":\\\"host-var-lib-cni-bin\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/multus\\\",\\\"name\\\":\\\"host-var-lib-cni-multus\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-var-lib-kubelet\\\"},{\\\"mountPath\\\":\\\"/hostroot\\\",\\\"name\\\":\\\"hostroot\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/net.d\\\",\\\"name\\\":\\\"multus-conf-dir\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d/multus.d\\\",\\\"name\\\":\\\"multus-daemon-config\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/cni/multus/certs\\\",\\\"name\\\":\\\"host-run-multus-certs\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes\\\",\\\"name\\\":\\\"etc-kubernetes\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-4f477\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-8vjml\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.113828 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.113882 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.113901 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.113942 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.113959 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container 
runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.119469 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"28c3b814-7b2a-401b-b3c2-cad3350907f2\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:03Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://84f4a7aefd168baee0fc8d7d2ea373004795ecf73dd87556b4bf15b61f63f21c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://82bafb6ef2b685c0041fb7451167dc12df67713d7b8cfe576d1f18248551f695\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://ac1b2ea8802c2a7165e75423cc34480850e5c85dc4f63e07584a45cc1e8992fb\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:5b881c97aa8e440c6b3ca001edfd789a9380066b8f11f35a8dd8d88c5c7dbf86\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-scheduler-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"m
ountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"wait-for-host-port\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a9a802dddd8b03056690ca95ad18694cc4f7eba70397d2a164f535139998e361\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}}}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-scheduler\"/\"openshift-kube-scheduler-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.132904 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"df3f0463-5c25-4b5a-9b7e-4804a45ce55d\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:17Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ab77a84d2c8051533845eac6ebb0b3487f7e6dc932e1512cb786aa05ffadd6f8\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:c0f9da410c07372b6c9ad6a79379b491cd10fdee88051c026b084652d85aed21\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cluster-policy-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a425dcd5a8bcbe855dfd3c1304bfca742a2d90d36e7f97f1e1ef8b097f5df964\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://a500d3749e7ea6611a5887efa02d4132d8520c0f346912ecf4c891a20661c79b\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manage
r-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-controller-manager-operator@sha256:8506ce0a578bc18fac117eb2b82799488ffac0bed08287faaf92edaf5d17ab95\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-controller-manager-recovery-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-controller-manager\"/\"kube-controller-manager-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.149255 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"7fa7c43a-a618-412d-8e3a-aee8148de65b\\\"},\\\"status\\\":{\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:52:07Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:14Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodScheduled\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"},{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c
987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-regeneration-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-cert-syncer\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:16Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca\\\",\\\"exitCode\\\":255,\\\"finishedAt\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"le observer\\\\nW0202 12:51:34.936246 1 builder.go:272] unable to get owner reference (falling back to namespace): pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\nI0202 12:51:34.936389 1 builder.go:304] check-endpoints version 4.18.0-202502101302.p0.g763313c.assembly.stream.el9-763313c-763313c860ea43fcfc9b1ac00ebae096b57c078e\\\\nI0202 12:51:34.936999 1 dynamic_serving_content.go:116] \\\\\\\"Loaded a new cert/key pair\\\\\\\" name=\\\\\\\"serving-cert::/tmp/serving-cert-3822757158/tls.crt::/tmp/serving-cert-3822757158/tls.key\\\\\\\"\\\\nI0202 12:51:35.306002 1 requestheader_controller.go:247] Loaded a new request header values for RequestHeaderAuthRequestController\\\\nI0202 12:51:35.308663 1 maxinflight.go:139] \\\\\\\"Initialized nonMutatingChan\\\\\\\" len=400\\\\nI0202 12:51:35.308687 1 maxinflight.go:145] \\\\\\\"Initialized mutatingChan\\\\\\\" len=200\\\\nI0202 12:51:35.308712 1 maxinflight.go:116] \\\\\\\"Set denominator for readonly requests\\\\\\\" limit=400\\\\nI0202 12:51:35.308717 1 maxinflight.go:120] \\\\\\\"Set denominator for mutating requests\\\\\\\" limit=200\\\\nI0202 12:51:35.312671 1 secure_serving.go:57] Forcing use of http/1.1 only\\\\nI0202 12:51:35.312678 1 genericapiserver.go:533] MuxAndDiscoveryComplete has all endpoints registered and discovery information is complete\\\\nW0202 12:51:35.312708 1 secure_serving.go:69] Use of insecure cipher 'TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312759 1 secure_serving.go:69] Use of insecure cipher 
'TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256' detected.\\\\nW0202 12:51:35.312779 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_GCM_SHA256' detected.\\\\nW0202 12:51:35.312783 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_GCM_SHA384' detected.\\\\nW0202 12:51:35.312787 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_128_CBC_SHA' detected.\\\\nW0202 12:51:35.312791 1 secure_serving.go:69] Use of insecure cipher 'TLS_RSA_WITH_AES_256_CBC_SHA' detected.\\\\nF0202 12:51:35.313932 1 cmd.go:182] pods \\\\\\\"kube-apiserver-crc\\\\\\\" not found\\\\n\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:20Z\\\"}},\\\"name\\\":\\\"kube-apiserver-check-endpoints\\\",\\\"ready\\\":true,\\\"restartCount\\\":2,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:55Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-resources\\\",\\\"name\\\":\\\"resource-dir\\\"},{\\\"mountPath\\\":\\\"/etc/kubernetes/static-pod-certs\\\",\\\"name\\\":\\\"cert-dir\\\"}]},{\\\"containerID\\\":\\\"cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\\\",\\\"image\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"imageID\\\":\\\"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-apiserver-insecure-readyz\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:17Z\\\"}}}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:06bc35825771aee1220d34720243b89c4ba8a8b335e6de2597126bd791fd90d4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:15Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:15Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/log/kube-apiserver\\\",\\\"name\\\":\\\"audit-dir\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:14Z\\\"}}\" for pod \"openshift-kube-apiserver\"/\"kube-apiserver-crc\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.166158 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"9d751cbb-f2e2-430d-9754-c882a5e924a5\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:35Z\\\",\\\"message\\\":\\\"containers with unready status: [check-endpoints]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e1baa38811c04bd8909e01a1f3be7421a1cb99d608d3dc4cf86d95b17de2ab8b\\\",\\\"imageID\\\":\\\"\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"exitCode\\\":137,\\\"finishedAt\\\":null,\\\"message\\\":\\\"The container could not be located when the pod was deleted. The container used to be Running\\\",\\\"reason\\\":\\\"ContainerStatusUnknown\\\",\\\"startedAt\\\":null}},\\\"name\\\":\\\"check-endpoints\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"waiting\\\":{\\\"reason\\\":\\\"ContainerCreating\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-s2dwl\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"podIP\\\":null,\\\"podIPs\\\":null}}\" for pod \"openshift-network-diagnostics\"/\"network-check-source-55646444c4-trplf\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.188017 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"f5d4c5d7-4a35-465b-aed5-64f5cfe37533\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:44Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:45Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://d83e75420f2fccc06afbd944ce9371f532c14f8a59670b06ff681d7b5ba8e8aa\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:7eeaee65f2808b819eedb413bdcabb9144e12f0dd97f13fd1afba93a95b67b26\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-multus-additional-cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:44Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:e40792096b162f0f9ce5f8362f51e5f8dea2c1ce4b1447235388416b5db7708c\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"egress-router-binary-copy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://a067f01b2ae7f9934ddfcf4cca37d1b25caac9943694f9790c33488935db60a3\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c857df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:687fddfbb085a1688df312ce4ec8c8
57df9b2daed8ff4a7ed6163a1154afa2cc\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"cni-plugins\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://18009a5c434f9dd93a6e0cf07363de3bff1512085ad3c4759f7b06bb2844f26b\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:39Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/tuning/\\\",\\\"name\\\":\\\"tuning-conf-dir\\\"},{\\\"mountPath\\\":\\\"/sysctls\\\",\\\"name\\\":\\\"cni-sysctl-allowlist\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:383f4cceeeaead203bb2327fdd367c64b64d729d7fa93089f249e496fcef0c78\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"bond-cni-plugin\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://833ca96872b110d49fe193d10dfec5e0318b415e0a5aa5b39cdc23587282b744\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:40Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:40Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:f567acb85146b5ed81451ec3e79f2de0c62e28c69b2eeade0abdf5d0c388e7aa\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"routeoverride-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://d54cad67212130b1e5cd81d6e2912a0f2c6595fc6c09f661eed79ee0684d7205\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:41Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/se
crets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni-bincopy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://543643ee4f93c17a9ff9e5eea72396a724b41f7b3dbae787c83ca21e85beb20f\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:42Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:42Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/entrypoint\\\",\\\"name\\\":\\\"cni-binary-copy\\\"},{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/os-release\\\",\\\"name\\\":\\\"os-release\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:98100674616e54319f6713d742fd0c3bdbc84e6e6173e8ccf4a2473a714c2bc4\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"whereabouts-cni\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://dfd98e55ac891494f4c530537621a8c833612c610c7467a7b6e9a51ad9a7673c\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:43Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:43Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/host/opt/cni/bin\\\",\\\"name\\\":\\\"cnibin\\\"},{\\\"mountPath\\\":\\\"/host/etc/cni/net.d\\\",\\\"name\\\":\\\"system-cni-dir\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-hv5xw\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-multus\"/\"multus-additional-cni-plugins-nxxh5\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.208604 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" err="failed to patch status 
\"{\\\"metadata\\\":{\\\"uid\\\":\\\"40400eeb-f9bd-4816-b65f-a25b0c3d021e\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:38Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:36Z\\\",\\\"message\\\":\\\"containers with unready status: [ovnkube-controller]\\\",\\\"reason\\\":\\\"ContainersNotReady\\\",\\\"status\\\":\\\"False\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-node\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy-ovn-metrics\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-node-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":
\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"nbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"northd\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-acl-logging\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovn-controller\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\
"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:38Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn/\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/dev/log\\\",\\\"name\\\":\\\"log-socket\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://538a1fcfd4cec654f7732feb3483686f95d6a14ae9c02cf6a2a05d54a8e856a4\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:06Z\\\",\\\"message\\\":\\\"r,TrafficDistribution:nil,},Status:ServiceStatus{LoadBalancer:LoadBalancerStatus{Ingress:[]LoadBalancerIngress{},},Conditions:[]Condition{},},}\\\\nI0202 12:52:06.930652 6390 lb_config.go:1031] Cluster endpoints for openshift-marketplace/marketplace-operator-metrics for network=default are: map[]\\\\nI0202 12:52:06.930694 6390 model_client.go:382] Update operations generated as: [{Op:update Table:Logical_Switch_Port Row:map[addresses:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]} options:{GoMap:map[iface-id-ver:5fe485a1-e14f-4c09-b5b9-f252bc42b7e8 requested-chassis:crc]} port_security:{GoSet:[0a:58:0a:d9:00:5c 10.217.0.92]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {c94130be-172c-477c-88c4-40cc7eba30fe}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:06.930699 6390 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin network policy controller, err: could not add Event Handler for anpInformer during admin network policy controller initialization, handler {0x1fcc6e0 0x1fcc3c0 0x1fcc360} was not added to shared informer because it has stopped already, failed to start node network controller: failed to start default node network controller: failed to set node crc annotations: Internal error occurred: fa\\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:06Z\\\"}},\\\"name\\\":\\\"ovnkube-controller\\\",\\\"ready\\\":false,\\\"restartCount\\\":3,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6\\\",\\\"exitCode\\\":1,\\\"finishedAt\\\":\\\"2026-02-02T12:52:37Z\\\",\\\"message\\\":\\\"e column _uuid == {54fbe873-7e6d-475f-a0ad-8dd5f06d850d}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e 
Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nI0202 12:52:37.179414 6825 metrics.go:553] Stopping metrics server at address \\\\\\\"127.0.0.1:29103\\\\\\\"\\\\nI0202 12:52:37.179446 6825 transact.go:42] Configuring OVN: [{Op:update Table:Load_Balancer Row:map[external_ids:{GoMap:map[k8s.ovn.org/kind:Service k8s.ovn.org/owner:openshift-operator-lifecycle-manager/package-server-manager-metrics]} name:Service_openshift-operator-lifecycle-manager/package-server-manager-metrics_TCP_cluster options:{GoMap:map[event:false hairpin_snat_ip:169.254.0.5 fd69::5 neighbor_responder:none reject:true skip_snat:false]} protocol:{GoSet:[tcp]} selection_fields:{GoSet:[]} vips:{GoMap:map[10.217.4.110:8443:]}] Rows:[] Columns:[] Mutations:[] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {f9232b32-e89f-4c8e-acc4-c6801b70dcb0}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:} {Op:mutate Table:NB_Global Row:map[] Rows:[] Columns:[] Mutations:[{Column:nb_cfg Mutator:+= Value:1}] Timeout:\\\\u003cnil\\\\u003e Where:[where column _uuid == {6011affd-30a6-4be6-872d-e4cf1ca780cf}] Until: Durable:\\\\u003cnil\\\\u003e Comment:\\\\u003cnil\\\\u003e Lock:\\\\u003cnil\\\\u003e UUID: UUIDName:}]\\\\nF0202 12:52:37.179483 6825 ovnkube.go:137] failed to run ovnkube: [failed to start network controller: failed to start default network controller: unable to create admin \\\",\\\"reason\\\":\\\"Error\\\",\\\"startedAt\\\":\\\"2026-02-02T12:52:36Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/var/lib/kubelet\\\",\\\"name\\\":\\\"host-kubelet\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/etc/systemd/system\\\",\\\"name\\\":\\\"systemd-units\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/host\\\",\\\"name\\\":\\\"host-slash\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/ovn-kubernetes/\\\",\\\"name\\\":\\\"host-run-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/netns\\\",\\\"name\\\":\\\"host-run-netns\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/run/systemd/private\\\",\\\"name\\\":\\\"run-systemd\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/cni-bin-dir\\\",\\\"name\\\":\\\"host-cni-bin\\\"},{\\\"mountPath\\\":\\\"/etc/cni/net.d\\\",\\\"name\\\":\\\"host-cni-netd\\\"},{\\\"mountPath\\\":\\\"/var/lib/cni/networks/ovn-k8s-cni-overlay\\\",\\\"name\\\":\\\"host-var-lib-cni-networks-ovn-kubernetes\\\"},{\\\"mountPath\\\":\\\"/run/openvswitch\\\",\\\"name\\\":\\\"run-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/log/ovnkube/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/etc/openvswitch\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/lib/openvswitch\\\",\\\"name\\\":\\\"var-lib-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o:
//a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"sbdb\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:41Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/ovnkube-lib\\\",\\\"name\\\":\\\"ovnkube-script-lib\\\"},{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/run/ovn/\\\",\\\"name\\\":\\\"run-ovn\\\"},{\\\"mountPath\\\":\\\"/var/log/ovn\\\",\\\"name\\\":\\\"node-log\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"initContainerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kubecfg-setup\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":false,\\\"state\\\":{\\\"terminated\\\":{\\\"containerID\\\":\\\"cri-o://ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737\\\",\\\"exitCode\\\":0,\\\"finishedAt\\\":\\\"2026-02-02T12:51:37Z\\\",\\\"reason\\\":\\\"Completed\\\",\\\"startedAt\\\":\\\"2026-02-02T12:51:37Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/ovn/\\\",\\\"name\\\":\\\"etc-openvswitch\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-sg6fs\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:36Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-node-j8d97\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.216952 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.217015 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.217029 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.217054 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 
crc kubenswrapper[4703]: I0202 12:52:44.217068 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.228558 4703 status_manager.go:875] "Failed to update status for pod" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" err="failed to patch status \"{\\\"metadata\\\":{\\\"uid\\\":\\\"051f5a57-faac-43af-9d4b-c83992dae9a2\\\"},\\\"status\\\":{\\\"$setElementOrder/conditions\\\":[{\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"type\\\":\\\"Initialized\\\"},{\\\"type\\\":\\\"Ready\\\"},{\\\"type\\\":\\\"ContainersReady\\\"},{\\\"type\\\":\\\"PodScheduled\\\"}],\\\"conditions\\\":[{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"PodReadyToStartContainers\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:49Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Initialized\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"Ready\\\"},{\\\"lastProbeTime\\\":null,\\\"lastTransitionTime\\\":\\\"2026-02-02T12:51:51Z\\\",\\\"status\\\":\\\"True\\\",\\\"type\\\":\\\"ContainersReady\\\"}],\\\"containerStatuses\\\":[{\\\"containerID\\\":\\\"cri-o://c5ecbd3262c020eaab1831f1214c77b828c99a36bef27e04b893b5bf88cfdd0d\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:242b3d66438c42745f4ef318bdeaf3d793426f12962a42ea83e18d06c08aaf09\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"kube-rbac-proxy\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/etc/pki/tls/metrics-cert\\\",\\\"name\\\":\\\"ovn-control-plane-metrics-cert\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"recursiveReadOnly\\\":\\\"Disabled\\\"}]},{\\\"containerID\\\":\\\"cri-o://c98b9f35a1b980b837455fa91be1419a90cedca6e52c783a25c9df2f5100ed5b\\\",\\\"image\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"imageID\\\":\\\"quay.io/openshift-release-dev/ocp-v4.0-art-dev@sha256:174f36cdd47ef0d1d2099482919d773257453265a2af0b17b154edc32fa41ac2\\\",\\\"lastState\\\":{},\\\"name\\\":\\\"ovnkube-cluster-manager\\\",\\\"ready\\\":true,\\\"restartCount\\\":0,\\\"started\\\":true,\\\"state\\\":{\\\"running\\\":{\\\"startedAt\\\":\\\"2026-02-02T12:51:50Z\\\"}},\\\"volumeMounts\\\":[{\\\"mountPath\\\":\\\"/run/ovnkube-config/\\\",\\\"name\\\":\\\"ovnkube-config\\\"},{\\\"mountPath\\\":\\\"/env\\\",\\\"name\\\":\\\"env-overrides\\\"},{\\\"mountPath\\\":\\\"/var/run/secrets/kubernetes.io/serviceaccount\\\",\\\"name\\\":\\\"kube-api-access-54fc7\\\",\\\"readOnly\\\":true,\\\"r
ecursiveReadOnly\\\":\\\"Disabled\\\"}]}],\\\"hostIP\\\":\\\"192.168.126.11\\\",\\\"hostIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"phase\\\":\\\"Running\\\",\\\"podIP\\\":\\\"192.168.126.11\\\",\\\"podIPs\\\":[{\\\"ip\\\":\\\"192.168.126.11\\\"}],\\\"startTime\\\":\\\"2026-02-02T12:51:49Z\\\"}}\" for pod \"openshift-ovn-kubernetes\"/\"ovnkube-control-plane-749d76644c-v86r6\": Internal error occurred: failed calling webhook \"pod.network-node-identity.openshift.io\": failed to call webhook: Post \"https://127.0.0.1:9743/pod?timeout=10s\": tls: failed to verify certificate: x509: certificate has expired or is not yet valid: current time 2026-02-02T12:52:44Z is after 2025-08-24T17:21:41Z" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.320008 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.320074 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.320092 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.320123 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.320145 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.423706 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.423746 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.423758 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.423779 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.423791 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.526940 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.527592 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.527616 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.527643 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.527662 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.631761 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.631842 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.631860 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.631891 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.631911 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.735734 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.735782 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.735796 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.735815 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.735825 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.839836 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.839895 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.839908 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.839929 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.839942 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.901530 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-05 16:53:48.115123748 +0000 UTC Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.933906 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.933906 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:44 crc kubenswrapper[4703]: E0202 12:52:44.934702 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:44 crc kubenswrapper[4703]: E0202 12:52:44.934852 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.943439 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.943538 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.943564 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.943638 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.943658 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:44Z","lastTransitionTime":"2026-02-02T12:52:44Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:44 crc kubenswrapper[4703]: I0202 12:52:44.955612 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/kube-rbac-proxy-crio-crc"] Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.046780 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.046826 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.046846 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.046867 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.046882 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.149857 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.150180 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.150291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.150410 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.150505 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.252912 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.252954 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.252963 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.252978 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.252989 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.355244 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.355305 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.355321 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.355335 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.355345 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.457738 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.457770 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.457780 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.457792 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.457802 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.561770 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.561828 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.561841 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.561860 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.561875 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.664831 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.664932 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.664969 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.665007 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.665072 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.769256 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.769336 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.769350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.769370 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.769383 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.873066 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.873188 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.873209 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.873242 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.873265 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.902477 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-19 05:54:15.713865497 +0000 UTC Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.933000 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.933107 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:45 crc kubenswrapper[4703]: E0202 12:52:45.933302 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:45 crc kubenswrapper[4703]: E0202 12:52:45.933421 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.976636 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.976724 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.976750 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.976790 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:45 crc kubenswrapper[4703]: I0202 12:52:45.976817 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:45Z","lastTransitionTime":"2026-02-02T12:52:45Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.080109 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.080188 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.080208 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.080244 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.080264 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.183336 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.183407 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.183447 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.183476 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.183500 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.287023 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.287095 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.287118 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.287150 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.287183 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.389017 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.389065 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.389076 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.389088 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.389101 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.491293 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.491335 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.491344 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.491363 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.491375 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.594604 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.594686 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.594700 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.594720 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.594733 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.697083 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.697120 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.697130 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.697147 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.697159 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:46Z","lastTransitionTime":"2026-02-02T12:52:46Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.902632 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-20 10:09:19.66113447 +0000 UTC
Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.933856 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:46 crc kubenswrapper[4703]: I0202 12:52:46.933874 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:46 crc kubenswrapper[4703]: E0202 12:52:46.934078 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:46 crc kubenswrapper[4703]: E0202 12:52:46.934184 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.006697 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.006755 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.006772 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.006800 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.006823 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:47Z","lastTransitionTime":"2026-02-02T12:52:47Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.903576 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-24 09:51:04.917299049 +0000 UTC
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.932866 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:47 crc kubenswrapper[4703]: I0202 12:52:47.932948 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:47 crc kubenswrapper[4703]: E0202 12:52:47.932975 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:47 crc kubenswrapper[4703]: E0202 12:52:47.933300 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.031849 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.031895 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.031904 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.031920 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.031939 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:48Z","lastTransitionTime":"2026-02-02T12:52:48Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
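Every failed sync above has the same root cause: nothing is present in /etc/kubernetes/cni/net.d/ yet, so the runtime reports NetworkReady=false until the network provider writes a config there. A sketch of that kind of directory probe, with the extension set (.conf, .conflist, .json) assumed from common CNI conventions rather than copied from libcni:

package main

import (
	"fmt"
	"os"
	"path/filepath"
)

// hasCNIConfig reports whether dir contains at least one file with a
// conventional CNI config extension. The extension set here is an
// assumption based on common CNI usage, not the libcni implementation.
func hasCNIConfig(dir string) (bool, error) {
	entries, err := os.ReadDir(dir)
	if err != nil {
		return false, err
	}
	for _, e := range entries {
		switch filepath.Ext(e.Name()) {
		case ".conf", ".conflist", ".json":
			if !e.IsDir() {
				return true, nil
			}
		}
	}
	return false, nil
}

func main() {
	ok, err := hasCNIConfig("/etc/kubernetes/cni/net.d")
	if err != nil {
		fmt.Println("cannot read CNI conf dir:", err)
		return
	}
	if !ok {
		fmt.Println("no CNI configuration file found; network plugin not ready")
	}
}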
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.904658 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-09 23:03:48.166952937 +0000 UTC
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.933095 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:48 crc kubenswrapper[4703]: I0202 12:52:48.933174 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:48 crc kubenswrapper[4703]: E0202 12:52:48.933377 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:48 crc kubenswrapper[4703]: E0202 12:52:48.933580 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.059296 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.059350 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.059365 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.059386 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.059401 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:49Z","lastTransitionTime":"2026-02-02T12:52:49Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
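The certificate_manager.go:356 entries report the same expiration (2026-02-24 05:53:03) but a different rotation deadline on every pass (2025-11-20, 2025-12-24, 2025-11-09, ...), which is consistent with a deadline re-drawn at a random point late in the certificate's lifetime. A toy illustration of that idea; the 70-90% window below is an assumption, not a value taken from the kubelet source:

package main

import (
	"fmt"
	"math/rand"
	"time"
)

// jitteredDeadline picks a rotation time at a random fraction
// (between 70% and 90%, an assumed window) of the certificate's
// validity period, which is why repeated log lines can show
// different deadlines for the same expiration.
func jitteredDeadline(notBefore, notAfter time.Time) time.Time {
	total := notAfter.Sub(notBefore)
	frac := 0.7 + 0.2*rand.Float64()
	return notBefore.Add(time.Duration(float64(total) * frac))
}

func main() {
	notBefore := time.Date(2025, 8, 24, 5, 53, 3, 0, time.UTC) // assumed issue time
	notAfter := time.Date(2026, 2, 24, 5, 53, 3, 0, time.UTC)  // expiration from the log
	for i := 0; i < 3; i++ {
		fmt.Println("rotation deadline:", jitteredDeadline(notBefore, notAfter))
	}
}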
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.904909 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-02 20:34:15.086179233 +0000 UTC
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.933752 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:52:49 crc kubenswrapper[4703]: I0202 12:52:49.933801 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf"
Feb 02 12:52:49 crc kubenswrapper[4703]: E0202 12:52:49.934070 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60"
Feb 02 12:52:49 crc kubenswrapper[4703]: E0202 12:52:49.934318 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.091750 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.091804 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.091819 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.091842 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.091857 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:50Z","lastTransitionTime":"2026-02-02T12:52:50Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"}
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.905803 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-12-21 09:14:30.410568635 +0000 UTC
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.933521 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.933522 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c"
Feb 02 12:52:50 crc kubenswrapper[4703]: E0202 12:52:50.934054 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8"
Feb 02 12:52:50 crc kubenswrapper[4703]: E0202 12:52:50.934209 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447"
Feb 02 12:52:50 crc kubenswrapper[4703]: I0202 12:52:50.934753 4703 scope.go:117] "RemoveContainer" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6"
Feb 02 12:52:50 crc kubenswrapper[4703]: E0202 12:52:50.935084 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e"
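The "back-off 40s restarting failed container" message for ovnkube-controller reflects the kubelet's crash-loop backoff, which grows by doubling per restart up to a cap. A sketch of that schedule, using the commonly cited 10s initial delay and 5m cap as assumptions:

package main

import (
	"fmt"
	"time"
)

// crashLoopDelay returns the assumed backoff before restart n
// (0-based): initial * 2^n, capped. The 10s/5m values are the
// commonly cited kubelet defaults, taken here as an assumption.
func crashLoopDelay(n int) time.Duration {
	const (
		initial = 10 * time.Second
		max     = 5 * time.Minute
	)
	d := initial
	for i := 0; i < n; i++ {
		d *= 2
		if d >= max {
			return max
		}
	}
	return d
}

func main() {
	// Prints 10s 20s 40s 1m20s 2m40s 5m; the "back-off 40s" in the
	// log corresponds to the third restart under this schedule.
	for n := 0; n < 6; n++ {
		fmt.Print(crashLoopDelay(n), " ")
	}
	fmt.Println()
}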
Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.063464 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd/etcd-crc" podStartSLOduration=13.063426477 podStartE2EDuration="13.063426477s" podCreationTimestamp="2026-02-02 12:52:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.062298968 +0000 UTC m=+98.077506522" watchObservedRunningTime="2026-02-02 12:52:51.063426477 +0000 UTC m=+98.078634051" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.105643 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/node-ca-qqhwl" podStartSLOduration=76.105605319 podStartE2EDuration="1m16.105605319s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.105542558 +0000 UTC m=+98.120750092" watchObservedRunningTime="2026-02-02 12:52:51.105605319 +0000 UTC m=+98.120812863" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.129322 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=75.129300888 podStartE2EDuration="1m15.129300888s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.124517313 +0000 UTC m=+98.139724847" watchObservedRunningTime="2026-02-02 12:52:51.129300888 +0000 UTC m=+98.144508422" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.129452 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.129485 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.129495 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.129513 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.129526 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.184424 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podStartSLOduration=76.184394748 podStartE2EDuration="1m16.184394748s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.170092674 +0000 UTC m=+98.185300208" watchObservedRunningTime="2026-02-02 12:52:51.184394748 +0000 UTC m=+98.199602282" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.197399 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/kube-rbac-proxy-crio-crc" podStartSLOduration=7.197368497 podStartE2EDuration="7.197368497s" podCreationTimestamp="2026-02-02 12:52:44 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.197171361 +0000 UTC m=+98.212378895" watchObservedRunningTime="2026-02-02 12:52:51.197368497 +0000 UTC m=+98.212576031" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.198099 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-8vjml" podStartSLOduration=76.198094706 podStartE2EDuration="1m16.198094706s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.185207809 +0000 UTC m=+98.200415363" watchObservedRunningTime="2026-02-02 12:52:51.198094706 +0000 UTC m=+98.213302240" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.211998 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler/openshift-kube-scheduler-crc" podStartSLOduration=48.211980048 podStartE2EDuration="48.211980048s" podCreationTimestamp="2026-02-02 12:52:03 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.211002833 +0000 UTC m=+98.226210367" watchObservedRunningTime="2026-02-02 12:52:51.211980048 +0000 UTC m=+98.227187582" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.230047 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/kube-controller-manager-crc" podStartSLOduration=76.23002339 podStartE2EDuration="1m16.23002339s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.229526117 +0000 UTC m=+98.244733651" watchObservedRunningTime="2026-02-02 12:52:51.23002339 +0000 UTC m=+98.245230924" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.231092 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.231131 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.231141 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.231157 4703 
kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.231166 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.249234 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-additional-cni-plugins-nxxh5" podStartSLOduration=76.249207701 podStartE2EDuration="1m16.249207701s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.24765718 +0000 UTC m=+98.262864724" watchObservedRunningTime="2026-02-02 12:52:51.249207701 +0000 UTC m=+98.264415245" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.292810 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-control-plane-749d76644c-v86r6" podStartSLOduration=75.292778149 podStartE2EDuration="1m15.292778149s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:51.290155871 +0000 UTC m=+98.305363405" watchObservedRunningTime="2026-02-02 12:52:51.292778149 +0000 UTC m=+98.307985693" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.333291 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.333326 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.333337 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.333353 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.333364 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.437028 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.437096 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.437115 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.437140 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.437160 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.539724 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.539781 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.539792 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.539810 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.539826 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.641993 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.642053 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.642070 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.642089 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.642122 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.744305 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.744362 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.744377 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.744399 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.744413 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.847124 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.847186 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.847204 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.847228 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.847245 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.906146 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-26 14:59:07.613352668 +0000 UTC Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.932870 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:51 crc kubenswrapper[4703]: E0202 12:52:51.933007 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.933381 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:51 crc kubenswrapper[4703]: E0202 12:52:51.933511 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.950012 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.950061 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.950071 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.950089 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:51 crc kubenswrapper[4703]: I0202 12:52:51.950100 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:51Z","lastTransitionTime":"2026-02-02T12:52:51Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.053574 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.053627 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.053641 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.053662 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.053679 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.157786 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.157874 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.157893 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.157917 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.157931 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.261482 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.261569 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.261597 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.261629 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.261653 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.364091 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.364372 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.364453 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.364529 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.364646 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.467012 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.467329 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.467430 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.467507 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.467576 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.571718 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.571770 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.571785 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.571812 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.571823 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.675588 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.675632 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.675642 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.675661 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.675673 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.778126 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.778168 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.778177 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.778192 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.778201 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.880498 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.880546 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.880554 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.880569 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.880582 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.903213 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientMemory" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.903304 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasNoDiskPressure" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.903320 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeHasSufficientPID" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.903345 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeNotReady" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.903357 4703 setters.go:603] "Node became not ready" node="crc" condition={"type":"Ready","status":"False","lastHeartbeatTime":"2026-02-02T12:52:52Z","lastTransitionTime":"2026-02-02T12:52:52Z","reason":"KubeletNotReady","message":"container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. 
Has your network provider started?"} Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.906327 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2025-11-16 11:43:50.947065893 +0000 UTC Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.933467 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.933523 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:52 crc kubenswrapper[4703]: E0202 12:52:52.933644 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:52 crc kubenswrapper[4703]: E0202 12:52:52.933804 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.948016 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt"] Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.948442 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.952770 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.953566 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.953603 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 12:52:52 crc kubenswrapper[4703]: I0202 12:52:52.953586 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.043347 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/476636c8-c6b0-4497-b0a3-32f28cbbf5be-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.043408 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/476636c8-c6b0-4497-b0a3-32f28cbbf5be-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.043662 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/476636c8-c6b0-4497-b0a3-32f28cbbf5be-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.043800 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/476636c8-c6b0-4497-b0a3-32f28cbbf5be-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.043896 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/476636c8-c6b0-4497-b0a3-32f28cbbf5be-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144640 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/476636c8-c6b0-4497-b0a3-32f28cbbf5be-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 
12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144683 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/476636c8-c6b0-4497-b0a3-32f28cbbf5be-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144700 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/476636c8-c6b0-4497-b0a3-32f28cbbf5be-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144746 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/476636c8-c6b0-4497-b0a3-32f28cbbf5be-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144762 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/476636c8-c6b0-4497-b0a3-32f28cbbf5be-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144797 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-cvo-updatepayloads\" (UniqueName: \"kubernetes.io/host-path/476636c8-c6b0-4497-b0a3-32f28cbbf5be-etc-cvo-updatepayloads\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.144823 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-ssl-certs\" (UniqueName: \"kubernetes.io/host-path/476636c8-c6b0-4497-b0a3-32f28cbbf5be-etc-ssl-certs\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.145614 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/476636c8-c6b0-4497-b0a3-32f28cbbf5be-service-ca\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.149696 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/476636c8-c6b0-4497-b0a3-32f28cbbf5be-serving-cert\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.160662 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" 
(UniqueName: \"kubernetes.io/projected/476636c8-c6b0-4497-b0a3-32f28cbbf5be-kube-api-access\") pod \"cluster-version-operator-5c965bbfc6-4kdbt\" (UID: \"476636c8-c6b0-4497-b0a3-32f28cbbf5be\") " pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.264347 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.492893 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" event={"ID":"476636c8-c6b0-4497-b0a3-32f28cbbf5be","Type":"ContainerStarted","Data":"d226a521b737548fc801a68378d29528771167bb4f0008f04652cbe86d1572dc"} Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.492931 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" event={"ID":"476636c8-c6b0-4497-b0a3-32f28cbbf5be","Type":"ContainerStarted","Data":"eb0ddfa93e264f6841685f677d02a362a92a4ffb602918dce7743d339f20dba5"} Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.907054 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2026-02-24 05:53:03 +0000 UTC, rotation deadline is 2026-01-04 18:10:10.94552043 +0000 UTC Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.907091 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Rotating certificates Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.913520 4703 reflector.go:368] Caches populated for *v1.CertificateSigningRequest from k8s.io/client-go/tools/watch/informerwatcher.go:146 Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.933470 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.933492 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:53 crc kubenswrapper[4703]: E0202 12:52:53.933949 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:53 crc kubenswrapper[4703]: E0202 12:52:53.934035 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:53 crc kubenswrapper[4703]: I0202 12:52:53.954592 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:53 crc kubenswrapper[4703]: E0202 12:52:53.954710 4703 secret.go:188] Couldn't get secret openshift-multus/metrics-daemon-secret: object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:52:53 crc kubenswrapper[4703]: E0202 12:52:53.954775 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs podName:0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60 nodeName:}" failed. No retries permitted until 2026-02-02 12:53:57.954759598 +0000 UTC m=+164.969967132 (durationBeforeRetry 1m4s). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs") pod "network-metrics-daemon-n2htj" (UID: "0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60") : object "openshift-multus"/"metrics-daemon-secret" not registered Feb 02 12:52:54 crc kubenswrapper[4703]: I0202 12:52:54.517185 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-version/cluster-version-operator-5c965bbfc6-4kdbt" podStartSLOduration=79.517158321 podStartE2EDuration="1m19.517158321s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:52:54.514868671 +0000 UTC m=+101.530076205" watchObservedRunningTime="2026-02-02 12:52:54.517158321 +0000 UTC m=+101.532365855" Feb 02 12:52:54 crc kubenswrapper[4703]: I0202 12:52:54.933509 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:54 crc kubenswrapper[4703]: I0202 12:52:54.933561 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:54 crc kubenswrapper[4703]: E0202 12:52:54.934044 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:54 crc kubenswrapper[4703]: E0202 12:52:54.934176 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:55 crc kubenswrapper[4703]: I0202 12:52:55.933493 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:55 crc kubenswrapper[4703]: I0202 12:52:55.933584 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:55 crc kubenswrapper[4703]: E0202 12:52:55.933695 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:55 crc kubenswrapper[4703]: E0202 12:52:55.933855 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:56 crc kubenswrapper[4703]: I0202 12:52:56.933574 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:56 crc kubenswrapper[4703]: I0202 12:52:56.933655 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:56 crc kubenswrapper[4703]: E0202 12:52:56.933697 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:56 crc kubenswrapper[4703]: E0202 12:52:56.933846 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:57 crc kubenswrapper[4703]: I0202 12:52:57.933441 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:57 crc kubenswrapper[4703]: I0202 12:52:57.933458 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:57 crc kubenswrapper[4703]: E0202 12:52:57.934375 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:52:57 crc kubenswrapper[4703]: E0202 12:52:57.935061 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:58 crc kubenswrapper[4703]: I0202 12:52:58.932987 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:52:58 crc kubenswrapper[4703]: I0202 12:52:58.933055 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:52:58 crc kubenswrapper[4703]: E0202 12:52:58.933103 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:52:58 crc kubenswrapper[4703]: E0202 12:52:58.933179 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:52:59 crc kubenswrapper[4703]: I0202 12:52:59.933226 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:52:59 crc kubenswrapper[4703]: E0202 12:52:59.933424 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:52:59 crc kubenswrapper[4703]: I0202 12:52:59.933567 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:52:59 crc kubenswrapper[4703]: E0202 12:52:59.934013 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:00 crc kubenswrapper[4703]: I0202 12:53:00.932966 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:00 crc kubenswrapper[4703]: I0202 12:53:00.933040 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:00 crc kubenswrapper[4703]: E0202 12:53:00.933493 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:00 crc kubenswrapper[4703]: E0202 12:53:00.933610 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:01 crc kubenswrapper[4703]: I0202 12:53:01.933872 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:01 crc kubenswrapper[4703]: E0202 12:53:01.934009 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:01 crc kubenswrapper[4703]: I0202 12:53:01.934227 4703 scope.go:117] "RemoveContainer" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6" Feb 02 12:53:01 crc kubenswrapper[4703]: I0202 12:53:01.934307 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:01 crc kubenswrapper[4703]: E0202 12:53:01.934361 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:01 crc kubenswrapper[4703]: E0202 12:53:01.934426 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" Feb 02 12:53:02 crc kubenswrapper[4703]: I0202 12:53:02.933110 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:02 crc kubenswrapper[4703]: I0202 12:53:02.933322 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:02 crc kubenswrapper[4703]: E0202 12:53:02.933437 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:02 crc kubenswrapper[4703]: E0202 12:53:02.933607 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:03 crc kubenswrapper[4703]: I0202 12:53:03.933752 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:03 crc kubenswrapper[4703]: I0202 12:53:03.933821 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:03 crc kubenswrapper[4703]: E0202 12:53:03.935288 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:03 crc kubenswrapper[4703]: E0202 12:53:03.935456 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:04 crc kubenswrapper[4703]: I0202 12:53:04.933470 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:04 crc kubenswrapper[4703]: I0202 12:53:04.933614 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:04 crc kubenswrapper[4703]: E0202 12:53:04.933833 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:04 crc kubenswrapper[4703]: E0202 12:53:04.933624 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:05 crc kubenswrapper[4703]: I0202 12:53:05.933650 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:05 crc kubenswrapper[4703]: I0202 12:53:05.934146 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:05 crc kubenswrapper[4703]: E0202 12:53:05.934293 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:05 crc kubenswrapper[4703]: E0202 12:53:05.935054 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:06 crc kubenswrapper[4703]: I0202 12:53:06.933086 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:06 crc kubenswrapper[4703]: I0202 12:53:06.933185 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:06 crc kubenswrapper[4703]: E0202 12:53:06.933249 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:06 crc kubenswrapper[4703]: E0202 12:53:06.933325 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:07 crc kubenswrapper[4703]: I0202 12:53:07.384380 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:53:07 crc kubenswrapper[4703]: I0202 12:53:07.386093 4703 scope.go:117] "RemoveContainer" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6" Feb 02 12:53:07 crc kubenswrapper[4703]: E0202 12:53:07.386254 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ovnkube-controller\" with CrashLoopBackOff: \"back-off 40s restarting failed container=ovnkube-controller pod=ovnkube-node-j8d97_openshift-ovn-kubernetes(40400eeb-f9bd-4816-b65f-a25b0c3d021e)\"" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" Feb 02 12:53:07 crc kubenswrapper[4703]: I0202 12:53:07.933187 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:07 crc kubenswrapper[4703]: E0202 12:53:07.933318 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:07 crc kubenswrapper[4703]: I0202 12:53:07.933470 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:07 crc kubenswrapper[4703]: E0202 12:53:07.933623 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:08 crc kubenswrapper[4703]: I0202 12:53:08.933380 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:08 crc kubenswrapper[4703]: I0202 12:53:08.933401 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:08 crc kubenswrapper[4703]: E0202 12:53:08.933533 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:08 crc kubenswrapper[4703]: E0202 12:53:08.933651 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:09 crc kubenswrapper[4703]: I0202 12:53:09.934056 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:09 crc kubenswrapper[4703]: I0202 12:53:09.934056 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:09 crc kubenswrapper[4703]: E0202 12:53:09.934667 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:09 crc kubenswrapper[4703]: E0202 12:53:09.934764 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:10 crc kubenswrapper[4703]: I0202 12:53:10.933335 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:10 crc kubenswrapper[4703]: I0202 12:53:10.933360 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:10 crc kubenswrapper[4703]: E0202 12:53:10.933448 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:10 crc kubenswrapper[4703]: E0202 12:53:10.933612 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.562810 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/1.log" Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.564013 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/0.log" Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.564068 4703 generic.go:334] "Generic (PLEG): container finished" podID="5fe22056-9a8b-4eba-8776-c50531078e2f" containerID="9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705" exitCode=1 Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.564099 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerDied","Data":"9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705"} Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.564131 4703 scope.go:117] "RemoveContainer" containerID="eeeb3c0282110401813dca83a631f0f97133100fb85310b9919a39511226c3eb" Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.564548 4703 scope.go:117] "RemoveContainer" containerID="9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705" Feb 02 12:53:11 crc kubenswrapper[4703]: E0202 12:53:11.564699 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 10s restarting failed container=kube-multus pod=multus-8vjml_openshift-multus(5fe22056-9a8b-4eba-8776-c50531078e2f)\"" pod="openshift-multus/multus-8vjml" podUID="5fe22056-9a8b-4eba-8776-c50531078e2f" Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.933043 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:11 crc kubenswrapper[4703]: I0202 12:53:11.933076 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:11 crc kubenswrapper[4703]: E0202 12:53:11.933176 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:11 crc kubenswrapper[4703]: E0202 12:53:11.933249 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:12 crc kubenswrapper[4703]: I0202 12:53:12.567748 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/1.log" Feb 02 12:53:12 crc kubenswrapper[4703]: I0202 12:53:12.933448 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:12 crc kubenswrapper[4703]: E0202 12:53:12.933620 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:12 crc kubenswrapper[4703]: I0202 12:53:12.933465 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:12 crc kubenswrapper[4703]: E0202 12:53:12.933718 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:13 crc kubenswrapper[4703]: E0202 12:53:13.904878 4703 kubelet_node_status.go:497] "Node not becoming ready in time after startup" Feb 02 12:53:13 crc kubenswrapper[4703]: I0202 12:53:13.933077 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:13 crc kubenswrapper[4703]: I0202 12:53:13.933102 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:13 crc kubenswrapper[4703]: E0202 12:53:13.934300 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:13 crc kubenswrapper[4703]: E0202 12:53:13.934495 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:14 crc kubenswrapper[4703]: E0202 12:53:14.177418 4703 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 12:53:14 crc kubenswrapper[4703]: I0202 12:53:14.933246 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:14 crc kubenswrapper[4703]: I0202 12:53:14.933246 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:14 crc kubenswrapper[4703]: E0202 12:53:14.933637 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:14 crc kubenswrapper[4703]: E0202 12:53:14.933734 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:15 crc kubenswrapper[4703]: I0202 12:53:15.932990 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:15 crc kubenswrapper[4703]: I0202 12:53:15.933052 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:15 crc kubenswrapper[4703]: E0202 12:53:15.933113 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:15 crc kubenswrapper[4703]: E0202 12:53:15.933173 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:16 crc kubenswrapper[4703]: I0202 12:53:16.933094 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:16 crc kubenswrapper[4703]: I0202 12:53:16.933118 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:16 crc kubenswrapper[4703]: E0202 12:53:16.933253 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:16 crc kubenswrapper[4703]: E0202 12:53:16.933402 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:17 crc kubenswrapper[4703]: I0202 12:53:17.933730 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:17 crc kubenswrapper[4703]: I0202 12:53:17.933789 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:17 crc kubenswrapper[4703]: E0202 12:53:17.933886 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:17 crc kubenswrapper[4703]: E0202 12:53:17.934070 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:18 crc kubenswrapper[4703]: I0202 12:53:18.932849 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:18 crc kubenswrapper[4703]: I0202 12:53:18.932912 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:18 crc kubenswrapper[4703]: E0202 12:53:18.933011 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:18 crc kubenswrapper[4703]: E0202 12:53:18.933088 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:19 crc kubenswrapper[4703]: E0202 12:53:19.179148 4703 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 12:53:19 crc kubenswrapper[4703]: I0202 12:53:19.932768 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:19 crc kubenswrapper[4703]: I0202 12:53:19.932880 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:19 crc kubenswrapper[4703]: E0202 12:53:19.933373 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:19 crc kubenswrapper[4703]: E0202 12:53:19.933487 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:19 crc kubenswrapper[4703]: I0202 12:53:19.933810 4703 scope.go:117] "RemoveContainer" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6" Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.592510 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/3.log" Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.594858 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerStarted","Data":"b63028c39d04964c40d876b91d5d5da77f2d4eebcfa357f0fc254c3a188aaa63"} Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.595177 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.621082 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podStartSLOduration=105.621063194 podStartE2EDuration="1m45.621063194s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:20.620108219 +0000 UTC m=+127.635315773" watchObservedRunningTime="2026-02-02 12:53:20.621063194 +0000 UTC m=+127.636270728" Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.768697 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-n2htj"] Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.768809 4703 util.go:30] "No sandbox for pod can be 
found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:20 crc kubenswrapper[4703]: E0202 12:53:20.768909 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.933436 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:20 crc kubenswrapper[4703]: I0202 12:53:20.933509 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:20 crc kubenswrapper[4703]: E0202 12:53:20.933526 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:20 crc kubenswrapper[4703]: E0202 12:53:20.933640 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:21 crc kubenswrapper[4703]: I0202 12:53:21.932926 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:21 crc kubenswrapper[4703]: E0202 12:53:21.933375 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:22 crc kubenswrapper[4703]: I0202 12:53:22.933471 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:22 crc kubenswrapper[4703]: I0202 12:53:22.933471 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:22 crc kubenswrapper[4703]: E0202 12:53:22.933586 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:22 crc kubenswrapper[4703]: I0202 12:53:22.933646 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:22 crc kubenswrapper[4703]: E0202 12:53:22.933722 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:22 crc kubenswrapper[4703]: E0202 12:53:22.933903 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:23 crc kubenswrapper[4703]: I0202 12:53:23.933360 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:23 crc kubenswrapper[4703]: E0202 12:53:23.934430 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:23 crc kubenswrapper[4703]: I0202 12:53:23.934549 4703 scope.go:117] "RemoveContainer" containerID="9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705" Feb 02 12:53:24 crc kubenswrapper[4703]: E0202 12:53:24.179921 4703 kubelet.go:2916] "Container runtime network not ready" networkReady="NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 12:53:24 crc kubenswrapper[4703]: I0202 12:53:24.610429 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/1.log" Feb 02 12:53:24 crc kubenswrapper[4703]: I0202 12:53:24.610535 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerStarted","Data":"6aa00aa209344e8c7103bb10bca8bb794141dbab88fce9586534f2cc8e58df6c"} Feb 02 12:53:24 crc kubenswrapper[4703]: I0202 12:53:24.933894 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:24 crc kubenswrapper[4703]: I0202 12:53:24.934008 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:24 crc kubenswrapper[4703]: E0202 12:53:24.934114 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:24 crc kubenswrapper[4703]: I0202 12:53:24.934008 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:24 crc kubenswrapper[4703]: E0202 12:53:24.934249 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:24 crc kubenswrapper[4703]: E0202 12:53:24.934480 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:25 crc kubenswrapper[4703]: I0202 12:53:25.933995 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:25 crc kubenswrapper[4703]: E0202 12:53:25.934398 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:26 crc kubenswrapper[4703]: I0202 12:53:26.933621 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:26 crc kubenswrapper[4703]: I0202 12:53:26.933621 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:26 crc kubenswrapper[4703]: I0202 12:53:26.933816 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:26 crc kubenswrapper[4703]: E0202 12:53:26.934058 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:26 crc kubenswrapper[4703]: E0202 12:53:26.934515 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:26 crc kubenswrapper[4703]: E0202 12:53:26.935106 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:27 crc kubenswrapper[4703]: I0202 12:53:27.933320 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:27 crc kubenswrapper[4703]: E0202 12:53:27.933611 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" podUID="9d751cbb-f2e2-430d-9754-c882a5e924a5" Feb 02 12:53:28 crc kubenswrapper[4703]: I0202 12:53:28.933037 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:28 crc kubenswrapper[4703]: E0202 12:53:28.933164 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-multus/network-metrics-daemon-n2htj" podUID="0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60" Feb 02 12:53:28 crc kubenswrapper[4703]: I0202 12:53:28.933291 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:28 crc kubenswrapper[4703]: E0202 12:53:28.933416 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" podUID="5fe485a1-e14f-4c09-b5b9-f252bc42b7e8" Feb 02 12:53:28 crc kubenswrapper[4703]: I0202 12:53:28.933929 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:28 crc kubenswrapper[4703]: E0202 12:53:28.934095 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="network is not ready: container runtime network not ready: NetworkReady=false reason:NetworkPluginNotReady message:Network plugin returns error: no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-network-diagnostics/network-check-target-xd92c" podUID="3b6479f0-333b-4a96-9adf-2099afdc2447" Feb 02 12:53:29 crc kubenswrapper[4703]: I0202 12:53:29.933175 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:29 crc kubenswrapper[4703]: I0202 12:53:29.935798 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 12:53:29 crc kubenswrapper[4703]: I0202 12:53:29.936153 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.932962 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.933057 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.933222 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.935330 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.935473 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.936262 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 12:53:30 crc kubenswrapper[4703]: I0202 12:53:30.936608 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.693257 4703 kubelet_node_status.go:724] "Recording event message for node" node="crc" event="NodeReady" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.729238 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7clns"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.729581 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.733484 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.733590 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.733762 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.734024 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.734267 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.734375 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.734485 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.734603 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.735367 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bsrg2"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.735828 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.741542 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.742538 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.746162 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.747168 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.753566 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.753756 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.753576 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.753578 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.753605 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.754985 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.755624 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.755829 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.756545 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.757083 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.757798 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758021 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758180 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758330 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758486 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758520 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758553 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758642 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758783 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758864 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.758841 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.760452 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.761152 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/console-f9d7485db-bb2nk"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.761446 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2w9hx"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.761587 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.761925 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.762069 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lm8wl"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.762238 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.763392 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.763434 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.763614 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.764577 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-console/downloads-7954f5f757-wwmdc"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.764948 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.765354 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.765913 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.767395 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-m25rz"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.767570 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.767796 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.767916 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768013 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768041 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768120 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768336 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768125 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768487 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.768944 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.769255 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wrpp2"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.769728 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.769885 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.770371 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.779424 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-brgxl"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.779984 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-9jft7"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.780494 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.780942 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.783476 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.783761 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.786484 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.786736 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787513 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-client-ca\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787548 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-service-ca\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787572 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d5ce7c42-1183-4e5d-8a51-4b817b40de14-auth-proxy-config\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787604 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wb5j6\" (UniqueName: \"kubernetes.io/projected/7837a007-6c6a-4856-8b9a-a8397e864ceb-kube-api-access-wb5j6\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787626 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787649 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-service-ca-bundle\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787671 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/335fb805-5368-4ab2-bdb6-d642aeb11902-images\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787696 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mlhrs\" (UniqueName: \"kubernetes.io/projected/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-kube-api-access-mlhrs\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: 
\"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787719 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-config\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787752 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787773 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nh4d4\" (UniqueName: \"kubernetes.io/projected/67d0ea85-5c1d-4420-afaa-0647a6c1eb93-kube-api-access-nh4d4\") pod \"downloads-7954f5f757-wwmdc\" (UID: \"67d0ea85-5c1d-4420-afaa-0647a6c1eb93\") " pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787792 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787813 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7837a007-6c6a-4856-8b9a-a8397e864ceb-trusted-ca\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787834 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-clzwl\" (UniqueName: \"kubernetes.io/projected/e37ec3da-d2b7-4a09-a48b-747ba887fa26-kube-api-access-clzwl\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787855 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644a0236-d3fc-404d-a4da-203ca11b1316-serving-cert\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787890 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a5941c46-e423-4c74-817a-1f08e831d439-audit-dir\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: 
\"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787914 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-r96dl\" (UniqueName: \"kubernetes.io/projected/becc124e-6c1b-43c2-af98-0c6df242b0d9-kube-api-access-r96dl\") pod \"cluster-samples-operator-665b6dd947-rxz57\" (UID: \"becc124e-6c1b-43c2-af98-0c6df242b0d9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787939 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/335fb805-5368-4ab2-bdb6-d642aeb11902-config\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787962 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txl6z\" (UniqueName: \"kubernetes.io/projected/a5941c46-e423-4c74-817a-1f08e831d439-kube-api-access-txl6z\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.787988 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5ce7c42-1183-4e5d-8a51-4b817b40de14-config\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788024 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7837a007-6c6a-4856-8b9a-a8397e864ceb-config\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788055 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e37ec3da-d2b7-4a09-a48b-747ba887fa26-serving-cert\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788080 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-config\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788125 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-client-ca\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " 
pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788152 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sr2h5\" (UniqueName: \"kubernetes.io/projected/d5ce7c42-1183-4e5d-8a51-4b817b40de14-kube-api-access-sr2h5\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788175 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mtcqg\" (UniqueName: \"kubernetes.io/projected/335fb805-5368-4ab2-bdb6-d642aeb11902-kube-api-access-mtcqg\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788321 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-serving-cert\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788664 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-trusted-ca-bundle\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788715 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/becc124e-6c1b-43c2-af98-0c6df242b0d9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rxz57\" (UID: \"becc124e-6c1b-43c2-af98-0c6df242b0d9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788836 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-serving-cert\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788884 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.788945 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4zkxd\" (UniqueName: \"kubernetes.io/projected/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-kube-api-access-4zkxd\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc 
kubenswrapper[4703]: I0202 12:53:33.788979 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-oauth-serving-cert\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789048 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9a7821e-59f7-414f-bebc-9fab34813bbc-serving-cert\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789087 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-t5s2l\" (UniqueName: \"kubernetes.io/projected/a9a7821e-59f7-414f-bebc-9fab34813bbc-kube-api-access-t5s2l\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789130 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17eac293-e5de-49da-b77d-c511b866a958-serving-cert\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789169 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-oauth-config\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789190 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789223 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/335fb805-5368-4ab2-bdb6-d642aeb11902-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789253 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tml5b\" (UniqueName: \"kubernetes.io/projected/644a0236-d3fc-404d-a4da-203ca11b1316-kube-api-access-tml5b\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc 
kubenswrapper[4703]: I0202 12:53:33.789347 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-config\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789374 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-audit-policies\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789405 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789427 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-config\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789459 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d5ce7c42-1183-4e5d-8a51-4b817b40de14-machine-approver-tls\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789552 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7837a007-6c6a-4856-8b9a-a8397e864ceb-serving-cert\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789577 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-etcd-client\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789625 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m5bk2\" (UniqueName: \"kubernetes.io/projected/17eac293-e5de-49da-b77d-c511b866a958-kube-api-access-m5bk2\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789648 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/17eac293-e5de-49da-b77d-c511b866a958-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.789666 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-encryption-config\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.791207 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.791339 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.791506 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.791620 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.791726 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.791964 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.792417 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.792522 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.792613 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.792707 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.792936 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.793079 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.793175 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.793371 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.793467 4703 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.793727 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.793873 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.818939 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.819256 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.819607 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.819710 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.820885 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.821561 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.821926 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.824493 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839067 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839119 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839070 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839329 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839488 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839611 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839856 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.839893 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 
12:53:33.840001 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.840084 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.840227 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.840964 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841076 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841098 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841203 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841253 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841336 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841368 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841533 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841812 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841895 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.841938 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.842044 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.842152 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.842217 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.842334 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.842702 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.843068 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.843146 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.843664 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.844419 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4xm9p"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.844893 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.845786 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.845911 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.845949 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.846212 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.846441 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.846791 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.848332 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.850371 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.851645 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.853909 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.854419 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.854744 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rxr5v"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.855565 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.857501 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.858054 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.858369 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.858567 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.859446 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.865849 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.866034 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.867068 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress/router-default-5444994796-9rxcr"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.868054 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.886407 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.887627 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.888232 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.888539 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.891672 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.892175 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.901656 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.933545 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.934034 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.934945 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.935121 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937227 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7837a007-6c6a-4856-8b9a-a8397e864ceb-serving-cert\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937263 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-etcd-client\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937313 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m5bk2\" (UniqueName: \"kubernetes.io/projected/17eac293-e5de-49da-b77d-c511b866a958-kube-api-access-m5bk2\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937335 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-encryption-config\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937332 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937353 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/17eac293-e5de-49da-b77d-c511b866a958-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937373 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-client-ca\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937391 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-service-ca\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937411 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ffae680-60cc-4057-b1b5-f95418327d4d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937442 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wb5j6\" (UniqueName: \"kubernetes.io/projected/7837a007-6c6a-4856-8b9a-a8397e864ceb-kube-api-access-wb5j6\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937465 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937489 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d5ce7c42-1183-4e5d-8a51-4b817b40de14-auth-proxy-config\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937519 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-service-ca-bundle\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937544 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/335fb805-5368-4ab2-bdb6-d642aeb11902-images\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937567 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mlhrs\" (UniqueName: \"kubernetes.io/projected/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-kube-api-access-mlhrs\") pod 
\"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937588 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f2f2ff94-e912-47d2-9bdb-d5d97b725715-signing-cabundle\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937602 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937612 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-config\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937637 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937659 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nh4d4\" (UniqueName: \"kubernetes.io/projected/67d0ea85-5c1d-4420-afaa-0647a6c1eb93-kube-api-access-nh4d4\") pod \"downloads-7954f5f757-wwmdc\" (UID: \"67d0ea85-5c1d-4420-afaa-0647a6c1eb93\") " pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937684 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937710 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7837a007-6c6a-4856-8b9a-a8397e864ceb-trusted-ca\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937717 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937734 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-clzwl\" (UniqueName: \"kubernetes.io/projected/e37ec3da-d2b7-4a09-a48b-747ba887fa26-kube-api-access-clzwl\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 
12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937759 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644a0236-d3fc-404d-a4da-203ca11b1316-serving-cert\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937791 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a5941c46-e423-4c74-817a-1f08e831d439-audit-dir\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937815 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-r96dl\" (UniqueName: \"kubernetes.io/projected/becc124e-6c1b-43c2-af98-0c6df242b0d9-kube-api-access-r96dl\") pod \"cluster-samples-operator-665b6dd947-rxz57\" (UID: \"becc124e-6c1b-43c2-af98-0c6df242b0d9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937845 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/335fb805-5368-4ab2-bdb6-d642aeb11902-config\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937874 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txl6z\" (UniqueName: \"kubernetes.io/projected/a5941c46-e423-4c74-817a-1f08e831d439-kube-api-access-txl6z\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937901 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5ce7c42-1183-4e5d-8a51-4b817b40de14-config\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937928 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7837a007-6c6a-4856-8b9a-a8397e864ceb-config\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937949 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qjs5t\" (UniqueName: \"kubernetes.io/projected/3ffae680-60cc-4057-b1b5-f95418327d4d-kube-api-access-qjs5t\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937975 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-4dnzx\" (UniqueName: \"kubernetes.io/projected/f2f2ff94-e912-47d2-9bdb-d5d97b725715-kube-api-access-4dnzx\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.937999 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ffae680-60cc-4057-b1b5-f95418327d4d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938024 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e37ec3da-d2b7-4a09-a48b-747ba887fa26-serving-cert\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938047 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-config\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938085 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f2f2ff94-e912-47d2-9bdb-d5d97b725715-signing-key\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938111 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-client-ca\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938138 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sr2h5\" (UniqueName: \"kubernetes.io/projected/d5ce7c42-1183-4e5d-8a51-4b817b40de14-kube-api-access-sr2h5\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938163 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mtcqg\" (UniqueName: \"kubernetes.io/projected/335fb805-5368-4ab2-bdb6-d642aeb11902-kube-api-access-mtcqg\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938208 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-serving-cert\") pod 
\"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938227 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-trusted-ca-bundle\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938248 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/becc124e-6c1b-43c2-af98-0c6df242b0d9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rxz57\" (UID: \"becc124e-6c1b-43c2-af98-0c6df242b0d9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938275 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-serving-cert\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938315 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938342 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4zkxd\" (UniqueName: \"kubernetes.io/projected/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-kube-api-access-4zkxd\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938370 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9a7821e-59f7-414f-bebc-9fab34813bbc-serving-cert\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938390 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-t5s2l\" (UniqueName: \"kubernetes.io/projected/a9a7821e-59f7-414f-bebc-9fab34813bbc-kube-api-access-t5s2l\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938411 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-oauth-serving-cert\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938435 4703 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17eac293-e5de-49da-b77d-c511b866a958-serving-cert\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938458 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-oauth-config\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938481 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938507 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-config\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938530 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/335fb805-5368-4ab2-bdb6-d642aeb11902-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938552 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tml5b\" (UniqueName: \"kubernetes.io/projected/644a0236-d3fc-404d-a4da-203ca11b1316-kube-api-access-tml5b\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938573 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-audit-policies\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938595 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938617 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-config\") pod 
\"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.938638 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d5ce7c42-1183-4e5d-8a51-4b817b40de14-machine-approver-tls\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.939390 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/a5941c46-e423-4c74-817a-1f08e831d439-audit-dir\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.940625 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/335fb805-5368-4ab2-bdb6-d642aeb11902-config\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.941238 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.941355 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/d5ce7c42-1183-4e5d-8a51-4b817b40de14-config\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.941370 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/d5ce7c42-1183-4e5d-8a51-4b817b40de14-auth-proxy-config\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.941649 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-client-ca\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.942210 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7837a007-6c6a-4856-8b9a-a8397e864ceb-config\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.942258 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-etcd-serving-ca\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.942430 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-service-ca\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.944371 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-config\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.945038 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-client-ca\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.949364 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.949622 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.955441 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.955913 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.955951 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-etcd-client\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.955964 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-config\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.956778 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/335fb805-5368-4ab2-bdb6-d642aeb11902-images\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.956833 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-trusted-ca-bundle\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 
12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.957381 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"available-featuregates\" (UniqueName: \"kubernetes.io/empty-dir/17eac293-e5de-49da-b77d-c511b866a958-available-featuregates\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.957505 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-service-ca-bundle\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.957951 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/7837a007-6c6a-4856-8b9a-a8397e864ceb-trusted-ca\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.958522 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7837a007-6c6a-4856-8b9a-a8397e864ceb-serving-cert\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.962693 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-approver-tls\" (UniqueName: \"kubernetes.io/secret/d5ce7c42-1183-4e5d-8a51-4b817b40de14-machine-approver-tls\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.963057 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/17eac293-e5de-49da-b77d-c511b866a958-serving-cert\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.963183 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.963576 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.964061 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-config\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.964853 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/a5941c46-e423-4c74-817a-1f08e831d439-audit-policies\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.965338 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-config\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.965578 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-encryption-config\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.966843 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/e37ec3da-d2b7-4a09-a48b-747ba887fa26-trusted-ca-bundle\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.970304 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-serving-cert\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.971454 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.972150 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.972844 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.973221 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.975368 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.976080 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.976106 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"oauth-serving-cert\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-oauth-serving-cert\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.976220 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lbjjm"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.977122 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.977760 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-427gh"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.977877 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.978555 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"machine-api-operator-tls\" (UniqueName: \"kubernetes.io/secret/335fb805-5368-4ab2-bdb6-d642aeb11902-machine-api-operator-tls\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.980172 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e37ec3da-d2b7-4a09-a48b-747ba887fa26-serving-cert\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.981472 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-config\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-config\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.982115 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-trusted-ca-bundle\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.985129 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9a7821e-59f7-414f-bebc-9fab34813bbc-serving-cert\") pod \"route-controller-manager-6576b87f9c-jhwld\" 
(UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.989047 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.996817 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.997196 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.998507 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644a0236-d3fc-404d-a4da-203ca11b1316-serving-cert\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.998536 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-machine-config-operator/machine-config-server-29dfl"] Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.999061 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-proxy-ca-bundles\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:33 crc kubenswrapper[4703]: I0202 12:53:33.999784 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-29dfl" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.000524 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-serving-cert\" (UniqueName: \"kubernetes.io/secret/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-serving-cert\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.000997 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2w9hx"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.002108 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wwmdc"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.002492 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"console-oauth-config\" (UniqueName: \"kubernetes.io/secret/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-console-oauth-config\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.002992 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"samples-operator-tls\" (UniqueName: \"kubernetes.io/secret/becc124e-6c1b-43c2-af98-0c6df242b0d9-samples-operator-tls\") pod \"cluster-samples-operator-665b6dd947-rxz57\" (UID: \"becc124e-6c1b-43c2-af98-0c6df242b0d9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.004536 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a5941c46-e423-4c74-817a-1f08e831d439-serving-cert\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.004649 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.009215 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.011863 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.012064 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.012749 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-9jft7"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.014321 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.016211 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.018705 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-controller-manager/controller-manager-879f6c89f-7clns"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.020014 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.021329 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.022736 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rxr5v"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.023985 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-brgxl"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.025056 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wrpp2"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.026434 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.027829 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bb2nk"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.029250 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.030375 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-m25rz"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.031309 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.032244 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.032453 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.036333 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.037166 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.039403 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qjs5t\" (UniqueName: \"kubernetes.io/projected/3ffae680-60cc-4057-b1b5-f95418327d4d-kube-api-access-qjs5t\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.039445 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4dnzx\" (UniqueName: \"kubernetes.io/projected/f2f2ff94-e912-47d2-9bdb-d5d97b725715-kube-api-access-4dnzx\") pod \"service-ca-9c57cc56f-4xm9p\" 
(UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.039463 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ffae680-60cc-4057-b1b5-f95418327d4d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.039494 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f2f2ff94-e912-47d2-9bdb-d5d97b725715-signing-key\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.039831 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.040668 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ffae680-60cc-4057-b1b5-f95418327d4d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.040816 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f2f2ff94-e912-47d2-9bdb-d5d97b725715-signing-cabundle\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.041509 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3ffae680-60cc-4057-b1b5-f95418327d4d-config\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.041571 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4xm9p"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.046049 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3ffae680-60cc-4057-b1b5-f95418327d4d-serving-cert\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.048461 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lm8wl"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.053843 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 
12:53:34.056424 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.060126 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.061899 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bsrg2"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.063431 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.065523 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-dns/dns-default-ptg9n"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.067630 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lbjjm"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.067718 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ptg9n" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.068561 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.069829 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-sbswk"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.071363 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.072683 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.073153 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.072756 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.074920 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ptg9n"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.076208 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-sbswk"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.077795 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-427gh"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.079473 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.080337 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ingress-canary/ingress-canary-crnml"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.083147 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-crnml"] Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.083259 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-crnml" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.111909 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.132358 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.152019 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.171807 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.191596 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.211480 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.233425 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.244149 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-key\" (UniqueName: \"kubernetes.io/secret/f2f2ff94-e912-47d2-9bdb-d5d97b725715-signing-key\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.252623 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"signing-cabundle" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.262899 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"signing-cabundle\" (UniqueName: \"kubernetes.io/configmap/f2f2ff94-e912-47d2-9bdb-d5d97b725715-signing-cabundle\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " 
pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.272591 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.292663 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.311969 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.332097 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.351897 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.371745 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.392162 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.411650 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.431756 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.457827 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.472192 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.493226 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.513036 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.552109 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.572622 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.594749 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.612531 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.632082 4703 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.652614 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.672103 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.691855 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.711658 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.731936 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.752032 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.771995 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.790966 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.811596 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.832207 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.852894 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.872035 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.892537 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.911893 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.940438 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.950735 4703 request.go:700] Waited for 1.01351601s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/openshift-multus/secrets?fieldSelector=metadata.name%3Dmultus-ac-dockercfg-9lkdf&limit=500&resourceVersion=0 Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.952694 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 12:53:34 crc kubenswrapper[4703]: I0202 12:53:34.988752 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-r96dl\" (UniqueName: 
\"kubernetes.io/projected/becc124e-6c1b-43c2-af98-0c6df242b0d9-kube-api-access-r96dl\") pod \"cluster-samples-operator-665b6dd947-rxz57\" (UID: \"becc124e-6c1b-43c2-af98-0c6df242b0d9\") " pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.012719 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txl6z\" (UniqueName: \"kubernetes.io/projected/a5941c46-e423-4c74-817a-1f08e831d439-kube-api-access-txl6z\") pod \"apiserver-7bbb656c7d-hh7ph\" (UID: \"a5941c46-e423-4c74-817a-1f08e831d439\") " pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.031821 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4zkxd\" (UniqueName: \"kubernetes.io/projected/cb600eb2-3a8a-4303-b99b-a6c40cd9149e-kube-api-access-4zkxd\") pod \"console-f9d7485db-bb2nk\" (UID: \"cb600eb2-3a8a-4303-b99b-a6c40cd9149e\") " pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.053888 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wb5j6\" (UniqueName: \"kubernetes.io/projected/7837a007-6c6a-4856-8b9a-a8397e864ceb-kube-api-access-wb5j6\") pod \"console-operator-58897d9998-2w9hx\" (UID: \"7837a007-6c6a-4856-8b9a-a8397e864ceb\") " pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.067991 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mtcqg\" (UniqueName: \"kubernetes.io/projected/335fb805-5368-4ab2-bdb6-d642aeb11902-kube-api-access-mtcqg\") pod \"machine-api-operator-5694c8668f-bsrg2\" (UID: \"335fb805-5368-4ab2-bdb6-d642aeb11902\") " pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.070661 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.091908 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sr2h5\" (UniqueName: \"kubernetes.io/projected/d5ce7c42-1183-4e5d-8a51-4b817b40de14-kube-api-access-sr2h5\") pod \"machine-approver-56656f9798-dmmgt\" (UID: \"d5ce7c42-1183-4e5d-8a51-4b817b40de14\") " pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.110096 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-t5s2l\" (UniqueName: \"kubernetes.io/projected/a9a7821e-59f7-414f-bebc-9fab34813bbc-kube-api-access-t5s2l\") pod \"route-controller-manager-6576b87f9c-jhwld\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.134915 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.135437 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m5bk2\" (UniqueName: \"kubernetes.io/projected/17eac293-e5de-49da-b77d-c511b866a958-kube-api-access-m5bk2\") pod \"openshift-config-operator-7777fb866f-7dn5j\" (UID: \"17eac293-e5de-49da-b77d-c511b866a958\") " pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.167909 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nh4d4\" (UniqueName: \"kubernetes.io/projected/67d0ea85-5c1d-4420-afaa-0647a6c1eb93-kube-api-access-nh4d4\") pod \"downloads-7954f5f757-wwmdc\" (UID: \"67d0ea85-5c1d-4420-afaa-0647a6c1eb93\") " pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.171636 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mlhrs\" (UniqueName: \"kubernetes.io/projected/9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b-kube-api-access-mlhrs\") pod \"openshift-apiserver-operator-796bbdcf4f-tft76\" (UID: \"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b\") " pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.189095 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-clzwl\" (UniqueName: \"kubernetes.io/projected/e37ec3da-d2b7-4a09-a48b-747ba887fa26-kube-api-access-clzwl\") pod \"authentication-operator-69f744f599-lm8wl\" (UID: \"e37ec3da-d2b7-4a09-a48b-747ba887fa26\") " pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.209192 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tml5b\" (UniqueName: \"kubernetes.io/projected/644a0236-d3fc-404d-a4da-203ca11b1316-kube-api-access-tml5b\") pod \"controller-manager-879f6c89f-7clns\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.215437 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.230808 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.232384 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.242522 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.253481 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.253523 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.261639 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.263648 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.269983 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.272886 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.282536 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.293031 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.306924 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.313893 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.341364 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.343063 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.354926 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.356111 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.363990 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console-operator/console-operator-58897d9998-2w9hx"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.364638 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.374642 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 12:53:35 crc kubenswrapper[4703]: W0202 12:53:35.387646 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podd5ce7c42_1183_4e5d_8a51_4b817b40de14.slice/crio-bf7c246640e903ffa6b16e44044c982f35df0b14a0a27c71613106059655c517 WatchSource:0}: Error finding container bf7c246640e903ffa6b16e44044c982f35df0b14a0a27c71613106059655c517: Status 404 returned error can't find the container with id bf7c246640e903ffa6b16e44044c982f35df0b14a0a27c71613106059655c517 Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.393224 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 12:53:35 crc kubenswrapper[4703]: W0202 12:53:35.410242 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7837a007_6c6a_4856_8b9a_a8397e864ceb.slice/crio-556c0fd72cdbfc96d8f300edb88c81cb5117013a207e22d1cc292a6fb8f152d6 WatchSource:0}: Error finding container 556c0fd72cdbfc96d8f300edb88c81cb5117013a207e22d1cc292a6fb8f152d6: Status 404 returned error can't find the container with id 556c0fd72cdbfc96d8f300edb88c81cb5117013a207e22d1cc292a6fb8f152d6 Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.411369 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.433424 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.456488 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/console-f9d7485db-bb2nk"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.456751 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.473666 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.494307 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.512390 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.530519 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication-operator/authentication-operator-69f744f599-lm8wl"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.531487 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.557486 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 12:53:35 crc kubenswrapper[4703]: W0202 12:53:35.569900 4703 manager.go:1169] 
Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcb600eb2_3a8a_4303_b99b_a6c40cd9149e.slice/crio-d11c943652701a7867012307a9d7edeb013e256ba843fd19235e65791377f92a WatchSource:0}: Error finding container d11c943652701a7867012307a9d7edeb013e256ba843fd19235e65791377f92a: Status 404 returned error can't find the container with id d11c943652701a7867012307a9d7edeb013e256ba843fd19235e65791377f92a Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.571923 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.591609 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.612585 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.636263 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"image-import-ca" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.646058 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" event={"ID":"becc124e-6c1b-43c2-af98-0c6df242b0d9","Type":"ContainerStarted","Data":"5807f38f7fed9f804c3d5eba3102770bd5d47366b0c04751b120463aa2f3de64"} Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.647715 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bb2nk" event={"ID":"cb600eb2-3a8a-4303-b99b-a6c40cd9149e","Type":"ContainerStarted","Data":"d11c943652701a7867012307a9d7edeb013e256ba843fd19235e65791377f92a"} Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.648371 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" event={"ID":"e37ec3da-d2b7-4a09-a48b-747ba887fa26","Type":"ContainerStarted","Data":"23057886a4a3335ab12bb1cb2d3fdd46cdeee780ea350dfec8ea9acc20ffb0bd"} Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.649180 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" event={"ID":"d5ce7c42-1183-4e5d-8a51-4b817b40de14","Type":"ContainerStarted","Data":"bf7c246640e903ffa6b16e44044c982f35df0b14a0a27c71613106059655c517"} Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.650994 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" event={"ID":"7837a007-6c6a-4856-8b9a-a8397e864ceb","Type":"ContainerStarted","Data":"362da0251c737847a5f11a293ccedd7a16b41787e78e0657f37888e67c343c08"} Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.651021 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" event={"ID":"7837a007-6c6a-4856-8b9a-a8397e864ceb","Type":"ContainerStarted","Data":"556c0fd72cdbfc96d8f300edb88c81cb5117013a207e22d1cc292a6fb8f152d6"} Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.651510 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.669335 4703 patch_prober.go:28] interesting pod/console-operator-58897d9998-2w9hx container/console-operator 
namespace/openshift-console-operator: Readiness probe status=failure output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" start-of-body= Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.669389 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" podUID="7837a007-6c6a-4856-8b9a-a8397e864ceb" containerName="console-operator" probeResult="failure" output="Get \"https://10.217.0.9:8443/readyz\": dial tcp 10.217.0.9:8443: connect: connection refused" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.683496 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.685238 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.691198 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.713860 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.732899 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.753560 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.763261 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7clns"] Feb 02 12:53:35 crc kubenswrapper[4703]: W0202 12:53:35.771426 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod644a0236_d3fc_404d_a4da_203ca11b1316.slice/crio-b90e25cf06afe529b73104c71b9033bf89a6790aa2169ccbb5a40bd785f07785 WatchSource:0}: Error finding container b90e25cf06afe529b73104c71b9033bf89a6790aa2169ccbb5a40bd785f07785: Status 404 returned error can't find the container with id b90e25cf06afe529b73104c71b9033bf89a6790aa2169ccbb5a40bd785f07785 Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.772372 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.792765 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.812938 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.832400 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.868106 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.869180 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.872294 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qjs5t\" (UniqueName: \"kubernetes.io/projected/3ffae680-60cc-4057-b1b5-f95418327d4d-kube-api-access-qjs5t\") pod \"openshift-controller-manager-operator-756b6f6bc6-q6hnb\" (UID: \"3ffae680-60cc-4057-b1b5-f95418327d4d\") " pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.888068 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4dnzx\" (UniqueName: \"kubernetes.io/projected/f2f2ff94-e912-47d2-9bdb-d5d97b725715-kube-api-access-4dnzx\") pod \"service-ca-9c57cc56f-4xm9p\" (UID: \"f2f2ff94-e912-47d2-9bdb-d5d97b725715\") " pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.893066 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.896685 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.912036 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.932655 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.952005 4703 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.958047 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.969934 4703 request.go:700] Waited for 1.896277172s due to client-side throttling, not priority and fairness, request: GET:https://api-int.crc.testing:6443/api/v1/namespaces/hostpath-provisioner/configmaps?fieldSelector=metadata.name%3Dkube-root-ca.crt&limit=500&resourceVersion=0 Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.974056 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.990628 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-console/downloads-7954f5f757-wwmdc"] Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.991167 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 12:53:35 crc kubenswrapper[4703]: I0202 12:53:35.996821 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph"] Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.003023 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/machine-api-operator-5694c8668f-bsrg2"] Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.005386 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76"] Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.013822 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.031254 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.053193 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.078807 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171450 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-bound-sa-token\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171508 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171560 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5654372a-377f-47b2-a476-6f1a55395e6c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171583 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171600 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/65f8642a-46ce-4023-a4bb-168f8a68e3ec-profile-collector-cert\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171637 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-default-certificate\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171669 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-registry-tls\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171686 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p68pn\" (UniqueName: \"kubernetes.io/projected/7141add7-a1ba-41ed-92e4-51a7684a90be-kube-api-access-p68pn\") pod \"package-server-manager-789f6589d5-x766n\" (UID: \"7141add7-a1ba-41ed-92e4-51a7684a90be\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171721 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-69kwb\" (UniqueName: \"kubernetes.io/projected/721e0407-77c9-416a-9297-6074e75d0a6e-kube-api-access-69kwb\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171741 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kn5kv\" (UniqueName: \"kubernetes.io/projected/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-kube-api-access-kn5kv\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171764 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc660f4-96eb-4013-b703-0967895a611b-secret-volume\") pod \"collect-profiles-29500605-9spmm\" (UID: 
\"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171785 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-trusted-ca\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.171803 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.173687 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.173827 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.173917 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-67hhn\" (UniqueName: \"kubernetes.io/projected/71a6d661-54a6-430a-902b-656503b8d12a-kube-api-access-67hhn\") pod \"control-plane-machine-set-operator-78cbb6b69f-z7cwj\" (UID: \"71a6d661-54a6-430a-902b-656503b8d12a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.173975 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7141add7-a1ba-41ed-92e4-51a7684a90be-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x766n\" (UID: \"7141add7-a1ba-41ed-92e4-51a7684a90be\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174003 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xz4ld\" (UniqueName: \"kubernetes.io/projected/afc660f4-96eb-4013-b703-0967895a611b-kube-api-access-xz4ld\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174094 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-registry-certificates\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174117 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174145 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174169 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b55617e-e44e-48c2-890a-2c2becd1ebcd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174214 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174249 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-metrics-certs\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174740 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174810 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-49vn6\" (UniqueName: \"kubernetes.io/projected/73892a3f-54ec-47ac-9081-5d1a83088794-kube-api-access-49vn6\") pod \"migrator-59844c95c7-ksnn8\" (UID: \"73892a3f-54ec-47ac-9081-5d1a83088794\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.174963 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cm5wb\" (UniqueName: \"kubernetes.io/projected/65f8642a-46ce-4023-a4bb-168f8a68e3ec-kube-api-access-cm5wb\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.175680 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/65f8642a-46ce-4023-a4bb-168f8a68e3ec-srv-cert\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.175798 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.175885 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/31057271-37ac-4277-9ab0-3ef846dfb6dd-metrics-tls\") pod \"dns-operator-744455d44c-9jft7\" (UID: \"31057271-37ac-4277-9ab0-3ef846dfb6dd\") " pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.175912 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-proxy-tls\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.175941 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176027 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176058 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71747bde-f844-47e2-aad8-abf178e2f5bd-serving-cert\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176127 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/71a6d661-54a6-430a-902b-656503b8d12a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-z7cwj\" (UID: \"71a6d661-54a6-430a-902b-656503b8d12a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176171 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vtt9k\" (UniqueName: \"kubernetes.io/projected/31057271-37ac-4277-9ab0-3ef846dfb6dd-kube-api-access-vtt9k\") pod \"dns-operator-744455d44c-9jft7\" (UID: \"31057271-37ac-4277-9ab0-3ef846dfb6dd\") " pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176194 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176219 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176372 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-svx6f\" (UniqueName: \"kubernetes.io/projected/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-kube-api-access-svx6f\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.176459 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:36.676440215 +0000 UTC m=+143.691647809 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176513 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-ca\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176587 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-audit-policies\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176613 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-service-ca-bundle\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.176902 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5654372a-377f-47b2-a476-6f1a55395e6c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.178799 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-config\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.178953 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b55617e-e44e-48c2-890a-2c2becd1ebcd-config\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.179839 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dj8dl\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-kube-api-access-dj8dl\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.179910 4703 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180081 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gp74w\" (UniqueName: \"kubernetes.io/projected/71747bde-f844-47e2-aad8-abf178e2f5bd-kube-api-access-gp74w\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180142 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3b55617e-e44e-48c2-890a-2c2becd1ebcd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180189 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j6jjs\" (UniqueName: \"kubernetes.io/projected/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-kube-api-access-j6jjs\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180327 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-service-ca\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180357 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/721e0407-77c9-416a-9297-6074e75d0a6e-audit-dir\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180388 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-stats-auth\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180414 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc660f4-96eb-4013-b703-0967895a611b-config-volume\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.180438 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-client\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.225114 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb"] Feb 02 12:53:36 crc kubenswrapper[4703]: W0202 12:53:36.250185 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3ffae680_60cc_4057_b1b5_f95418327d4d.slice/crio-bca1db9e23be3955e6d4f0d5b7ad7f6d0d426d5f4c0fbeb1a7db22232a4cc8ad WatchSource:0}: Error finding container bca1db9e23be3955e6d4f0d5b7ad7f6d0d426d5f4c0fbeb1a7db22232a4cc8ad: Status 404 returned error can't find the container with id bca1db9e23be3955e6d4f0d5b7ad7f6d0d426d5f4c0fbeb1a7db22232a4cc8ad Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.282075 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.282260 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:36.782232741 +0000 UTC m=+143.797440275 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.282843 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ffa77803-cf30-4031-a0b2-c6ad1a66ffd7-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q4xdj\" (UID: \"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.282880 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.282914 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3b55617e-e44e-48c2-890a-2c2becd1ebcd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.282942 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j6jjs\" (UniqueName: \"kubernetes.io/projected/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-kube-api-access-j6jjs\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.282988 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-srv-cert\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283017 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e1c02e39-f7b4-4949-ba76-132058e2587c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283049 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-stats-auth\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283079 4703 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd14d895-97b2-4840-aa5b-ed942b6a89ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283108 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pckbl\" (UniqueName: \"kubernetes.io/projected/45adf198-876a-4b6e-b75e-348e84079e86-kube-api-access-pckbl\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283144 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283170 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/65f8642a-46ce-4023-a4bb-168f8a68e3ec-profile-collector-cert\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283198 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-profile-collector-cert\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283221 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/590dfde0-b415-4fb3-85c5-b8d5376617c8-apiservice-cert\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283247 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/590dfde0-b415-4fb3-85c5-b8d5376617c8-webhook-cert\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283318 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-metrics-tls\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283371 4703 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283398 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7874x\" (UniqueName: \"kubernetes.io/projected/e1c02e39-f7b4-4949-ba76-132058e2587c-kube-api-access-7874x\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283417 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j5z6x\" (UniqueName: \"kubernetes.io/projected/b9431171-295f-4b2f-9a03-3325d993850c-kube-api-access-j5z6x\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283435 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-encryption-config\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283457 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-csi-data-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283475 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/50bd392d-305c-4e83-9769-8313e7f6a514-node-bootstrap-token\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283504 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-plugins-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283553 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-69kwb\" (UniqueName: \"kubernetes.io/projected/721e0407-77c9-416a-9297-6074e75d0a6e-kube-api-access-69kwb\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283588 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: 
\"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-trusted-ca\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283617 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283640 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283669 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283695 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-trusted-ca\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283734 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-registry-certificates\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283774 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-metrics-certs\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.283850 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284161 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cm5wb\" (UniqueName: \"kubernetes.io/projected/65f8642a-46ce-4023-a4bb-168f8a68e3ec-kube-api-access-cm5wb\") pod 
\"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284644 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284686 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-proxy-tls\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284717 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/acda3264-e715-41ba-bd3f-843e23ecfd82-images\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284762 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b9431171-295f-4b2f-9a03-3325d993850c-metrics-tls\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284787 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7f576bdf-9ebc-46a8-8079-cb105274bba2-node-pullsecrets\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284815 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30b86d7c-a747-40c2-b76f-76eec0f9439c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284856 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71747bde-f844-47e2-aad8-abf178e2f5bd-serving-cert\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284880 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/590dfde0-b415-4fb3-85c5-b8d5376617c8-tmpfs\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " 
pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284919 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/71a6d661-54a6-430a-902b-656503b8d12a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-z7cwj\" (UID: \"71a6d661-54a6-430a-902b-656503b8d12a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284945 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5-cert\") pod \"ingress-canary-crnml\" (UID: \"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5\") " pod="openshift-ingress-canary/ingress-canary-crnml" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.284986 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285014 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-svx6f\" (UniqueName: \"kubernetes.io/projected/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-kube-api-access-svx6f\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285039 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-registration-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285082 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-ca\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285114 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-service-ca-bundle\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285160 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-audit-policies\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285203 4703 reconciler_common.go:218] "operationExecutor.MountVolume started 
for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285231 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2wxzn\" (UniqueName: \"kubernetes.io/projected/acda3264-e715-41ba-bd3f-843e23ecfd82-kube-api-access-2wxzn\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285260 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-etcd-client\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285301 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-trusted-ca\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285312 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gp74w\" (UniqueName: \"kubernetes.io/projected/71747bde-f844-47e2-aad8-abf178e2f5bd-kube-api-access-gp74w\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285348 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-service-ca\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285373 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cd14d895-97b2-4840-aa5b-ed942b6a89ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285401 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-89fq5\" (UniqueName: \"kubernetes.io/projected/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-kube-api-access-89fq5\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285429 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"audit-dir\" (UniqueName: \"kubernetes.io/host-path/721e0407-77c9-416a-9297-6074e75d0a6e-audit-dir\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.285586 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.287559 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:36.787537023 +0000 UTC m=+143.802744747 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.287805 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-service-ca\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.287918 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc660f4-96eb-4013-b703-0967895a611b-config-volume\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.287970 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-client\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288004 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f576bdf-9ebc-46a8-8079-cb105274bba2-audit-dir\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288167 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-bound-sa-token\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " 
pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288211 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5654372a-377f-47b2-a476-6f1a55395e6c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288242 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e1c02e39-f7b4-4949-ba76-132058e2587c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288273 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58dj8\" (UniqueName: \"kubernetes.io/projected/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-kube-api-access-58dj8\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288321 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-image-import-ca\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288354 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b86d7c-a747-40c2-b76f-76eec0f9439c-config\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288504 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/721e0407-77c9-416a-9297-6074e75d0a6e-audit-dir\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288490 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-default-certificate\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288570 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-config\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: 
I0202 12:53:36.288596 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-socket-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288637 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-registry-tls\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288670 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p68pn\" (UniqueName: \"kubernetes.io/projected/7141add7-a1ba-41ed-92e4-51a7684a90be-kube-api-access-p68pn\") pod \"package-server-manager-789f6589d5-x766n\" (UID: \"7141add7-a1ba-41ed-92e4-51a7684a90be\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288706 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kn5kv\" (UniqueName: \"kubernetes.io/projected/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-kube-api-access-kn5kv\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288897 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc660f4-96eb-4013-b703-0967895a611b-secret-volume\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288928 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mzrdr\" (UniqueName: \"kubernetes.io/projected/ffa77803-cf30-4031-a0b2-c6ad1a66ffd7-kube-api-access-mzrdr\") pod \"multus-admission-controller-857f4d67dd-q4xdj\" (UID: \"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288952 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-serving-cert\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.288980 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-mountpoint-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289000 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" 
(UniqueName: \"kubernetes.io/secret/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289023 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1c02e39-f7b4-4949-ba76-132058e2587c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289053 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-67hhn\" (UniqueName: \"kubernetes.io/projected/71a6d661-54a6-430a-902b-656503b8d12a-kube-api-access-67hhn\") pod \"control-plane-machine-set-operator-78cbb6b69f-z7cwj\" (UID: \"71a6d661-54a6-430a-902b-656503b8d12a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289071 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7141add7-a1ba-41ed-92e4-51a7684a90be-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x766n\" (UID: \"7141add7-a1ba-41ed-92e4-51a7684a90be\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289089 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xz4ld\" (UniqueName: \"kubernetes.io/projected/afc660f4-96eb-4013-b703-0967895a611b-kube-api-access-xz4ld\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289109 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-config\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289138 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-service-ca\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-service-ca\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289201 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b55617e-e44e-48c2-890a-2c2becd1ebcd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289232 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: 
\"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289256 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ss6w6\" (UniqueName: \"kubernetes.io/projected/c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5-kube-api-access-ss6w6\") pod \"ingress-canary-crnml\" (UID: \"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5\") " pod="openshift-ingress-canary/ingress-canary-crnml" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289330 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289351 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-n8qsw\" (UniqueName: \"kubernetes.io/projected/590dfde0-b415-4fb3-85c5-b8d5376617c8-kube-api-access-n8qsw\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289369 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9431171-295f-4b2f-9a03-3325d993850c-config-volume\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289398 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-49vn6\" (UniqueName: \"kubernetes.io/projected/73892a3f-54ec-47ac-9081-5d1a83088794-kube-api-access-49vn6\") pod \"migrator-59844c95c7-ksnn8\" (UID: \"73892a3f-54ec-47ac-9081-5d1a83088794\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289409 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-audit-policies\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289422 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ldq6f\" (UniqueName: \"kubernetes.io/projected/50bd392d-305c-4e83-9769-8313e7f6a514-kube-api-access-ldq6f\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289497 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-serving-cert\") pod 
\"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289547 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd14d895-97b2-4840-aa5b-ed942b6a89ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289577 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-etcd-serving-ca\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289613 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/65f8642a-46ce-4023-a4bb-168f8a68e3ec-srv-cert\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289643 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/31057271-37ac-4277-9ab0-3ef846dfb6dd-metrics-tls\") pod \"dns-operator-744455d44c-9jft7\" (UID: \"31057271-37ac-4277-9ab0-3ef846dfb6dd\") " pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289675 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289710 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289746 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/acda3264-e715-41ba-bd3f-843e23ecfd82-proxy-tls\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289772 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-audit\") pod \"apiserver-76f77b778f-lbjjm\" (UID: 
\"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289546 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-registry-certificates\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289910 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.289960 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d8djz\" (UniqueName: \"kubernetes.io/projected/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-kube-api-access-d8djz\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290011 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-serving-cert\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290024 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc660f4-96eb-4013-b703-0967895a611b-config-volume\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290040 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vtt9k\" (UniqueName: \"kubernetes.io/projected/31057271-37ac-4277-9ab0-3ef846dfb6dd-kube-api-access-vtt9k\") pod \"dns-operator-744455d44c-9jft7\" (UID: \"31057271-37ac-4277-9ab0-3ef846dfb6dd\") " pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290067 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290092 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/50bd392d-305c-4e83-9769-8313e7f6a514-certs\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl" Feb 02 12:53:36 crc 
kubenswrapper[4703]: I0202 12:53:36.290116 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xgsnt\" (UniqueName: \"kubernetes.io/projected/7f576bdf-9ebc-46a8-8079-cb105274bba2-kube-api-access-xgsnt\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290314 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5654372a-377f-47b2-a476-6f1a55395e6c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290347 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-config\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290371 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/acda3264-e715-41ba-bd3f-843e23ecfd82-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290397 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99dbd\" (UniqueName: \"kubernetes.io/projected/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-kube-api-access-99dbd\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290417 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30b86d7c-a747-40c2-b76f-76eec0f9439c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290444 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b55617e-e44e-48c2-890a-2c2becd1ebcd-config\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.290468 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" Feb 02 12:53:36 crc 
kubenswrapper[4703]: I0202 12:53:36.290494 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dj8dl\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-kube-api-access-dj8dl\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.293422 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"service-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-service-ca-bundle\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.297117 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-session\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.297337 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-ca\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-ca\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.298544 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/65f8642a-46ce-4023-a4bb-168f8a68e3ec-profile-collector-cert\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.298568 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5654372a-377f-47b2-a476-6f1a55395e6c-ca-trust-extracted\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.299149 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"control-plane-machine-set-operator-tls\" (UniqueName: \"kubernetes.io/secret/71a6d661-54a6-430a-902b-656503b8d12a-control-plane-machine-set-operator-tls\") pod \"control-plane-machine-set-operator-78cbb6b69f-z7cwj\" (UID: \"71a6d661-54a6-430a-902b-656503b8d12a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.299286 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.299399 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: 
\"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-registry-tls\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.309659 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.309771 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-cliconfig\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.310382 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/65f8642a-46ce-4023-a4bb-168f8a68e3ec-srv-cert\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.311674 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.311870 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/71747bde-f844-47e2-aad8-abf178e2f5bd-serving-cert\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.313213 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/3b55617e-e44e-48c2-890a-2c2becd1ebcd-serving-cert\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.313022 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"package-server-manager-serving-cert\" (UniqueName: \"kubernetes.io/secret/7141add7-a1ba-41ed-92e4-51a7684a90be-package-server-manager-serving-cert\") pod \"package-server-manager-789f6589d5-x766n\" (UID: \"7141add7-a1ba-41ed-92e4-51a7684a90be\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.314215 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/3b55617e-e44e-48c2-890a-2c2becd1ebcd-config\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: 
\"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.314332 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc660f4-96eb-4013-b703-0967895a611b-secret-volume\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.315767 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mcc-auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-mcc-auth-proxy-config\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.317385 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-router-certs\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.320536 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-error\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.320699 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/31057271-37ac-4277-9ab0-3ef846dfb6dd-metrics-tls\") pod \"dns-operator-744455d44c-9jft7\" (UID: \"31057271-37ac-4277-9ab0-3ef846dfb6dd\") " pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.321931 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5654372a-377f-47b2-a476-6f1a55395e6c-installation-pull-secrets\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.322524 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-serving-cert\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.323089 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"stats-auth\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-stats-auth\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.323362 4703 
operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/71747bde-f844-47e2-aad8-abf178e2f5bd-etcd-client\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.323384 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-login\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.323418 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.323650 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/71747bde-f844-47e2-aad8-abf178e2f5bd-config\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.320295 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.331936 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-proxy-tls\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.332687 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"default-certificate\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-default-certificate\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.337789 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-metrics-certs\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.346458 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca/service-ca-9c57cc56f-4xm9p"] Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.355451 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access\" (UniqueName: \"kubernetes.io/projected/3b55617e-e44e-48c2-890a-2c2becd1ebcd-kube-api-access\") pod \"kube-apiserver-operator-766d6c64bb-llfrc\" (UID: \"3b55617e-e44e-48c2-890a-2c2becd1ebcd\") " pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.364046 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j6jjs\" (UniqueName: \"kubernetes.io/projected/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-kube-api-access-j6jjs\") pod \"marketplace-operator-79b997595-rxr5v\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.368405 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-69kwb\" (UniqueName: \"kubernetes.io/projected/721e0407-77c9-416a-9297-6074e75d0a6e-kube-api-access-69kwb\") pod \"oauth-openshift-558db77b4-m25rz\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.385876 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cm5wb\" (UniqueName: \"kubernetes.io/projected/65f8642a-46ce-4023-a4bb-168f8a68e3ec-kube-api-access-cm5wb\") pod \"olm-operator-6b444d44fb-pg7ff\" (UID: \"65f8642a-46ce-4023-a4bb-168f8a68e3ec\") " pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.391583 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.391800 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:36.891772747 +0000 UTC m=+143.906980281 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.391893 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e1c02e39-f7b4-4949-ba76-132058e2587c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.391938 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58dj8\" (UniqueName: \"kubernetes.io/projected/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-kube-api-access-58dj8\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.391961 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-image-import-ca\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.391986 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b86d7c-a747-40c2-b76f-76eec0f9439c-config\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392025 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-config\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392041 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-socket-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392057 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-serving-cert\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392106 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mzrdr\" (UniqueName: 
\"kubernetes.io/projected/ffa77803-cf30-4031-a0b2-c6ad1a66ffd7-kube-api-access-mzrdr\") pod \"multus-admission-controller-857f4d67dd-q4xdj\" (UID: \"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392126 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1c02e39-f7b4-4949-ba76-132058e2587c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392142 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-mountpoint-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392195 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-config\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392218 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ss6w6\" (UniqueName: \"kubernetes.io/projected/c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5-kube-api-access-ss6w6\") pod \"ingress-canary-crnml\" (UID: \"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5\") " pod="openshift-ingress-canary/ingress-canary-crnml" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392252 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-n8qsw\" (UniqueName: \"kubernetes.io/projected/590dfde0-b415-4fb3-85c5-b8d5376617c8-kube-api-access-n8qsw\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.392950 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-config\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.393442 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-import-ca\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-image-import-ca\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.393686 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/30b86d7c-a747-40c2-b76f-76eec0f9439c-config\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:36 crc 
kubenswrapper[4703]: I0202 12:53:36.393770 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"mountpoint-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-mountpoint-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.393894 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/e1c02e39-f7b4-4949-ba76-132058e2587c-trusted-ca\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.393981 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"socket-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-socket-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.394734 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9431171-295f-4b2f-9a03-3325d993850c-config-volume\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.395093 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-config\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.398467 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-serving-cert\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.398817 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/b9431171-295f-4b2f-9a03-3325d993850c-config-volume\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.398964 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ldq6f\" (UniqueName: \"kubernetes.io/projected/50bd392d-305c-4e83-9769-8313e7f6a514-kube-api-access-ldq6f\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.398991 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399037 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd14d895-97b2-4840-aa5b-ed942b6a89ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399058 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-etcd-serving-ca\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399106 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399132 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/acda3264-e715-41ba-bd3f-843e23ecfd82-proxy-tls\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399179 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-audit\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399205 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d8djz\" (UniqueName: \"kubernetes.io/projected/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-kube-api-access-d8djz\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399251 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-serving-cert\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399298 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"certs\" (UniqueName: \"kubernetes.io/secret/50bd392d-305c-4e83-9769-8313e7f6a514-certs\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399315 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xgsnt\" (UniqueName: \"kubernetes.io/projected/7f576bdf-9ebc-46a8-8079-cb105274bba2-kube-api-access-xgsnt\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399363 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/acda3264-e715-41ba-bd3f-843e23ecfd82-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399384 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399402 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99dbd\" (UniqueName: \"kubernetes.io/projected/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-kube-api-access-99dbd\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399453 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30b86d7c-a747-40c2-b76f-76eec0f9439c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399484 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ffa77803-cf30-4031-a0b2-c6ad1a66ffd7-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q4xdj\" (UID: \"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399519 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399546 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-srv-cert\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399566 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e1c02e39-f7b4-4949-ba76-132058e2587c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399598 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd14d895-97b2-4840-aa5b-ed942b6a89ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399623 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pckbl\" (UniqueName: \"kubernetes.io/projected/45adf198-876a-4b6e-b75e-348e84079e86-kube-api-access-pckbl\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399638 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-metrics-tls\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399681 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-profile-collector-cert\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399699 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/590dfde0-b415-4fb3-85c5-b8d5376617c8-apiservice-cert\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399713 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/590dfde0-b415-4fb3-85c5-b8d5376617c8-webhook-cert\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399731 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7874x\" (UniqueName: \"kubernetes.io/projected/e1c02e39-f7b4-4949-ba76-132058e2587c-kube-api-access-7874x\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399764 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/50bd392d-305c-4e83-9769-8313e7f6a514-node-bootstrap-token\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399784 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j5z6x\" (UniqueName: \"kubernetes.io/projected/b9431171-295f-4b2f-9a03-3325d993850c-kube-api-access-j5z6x\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399802 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-encryption-config\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399844 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-csi-data-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399865 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-plugins-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399885 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cd14d895-97b2-4840-aa5b-ed942b6a89ce-config\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.399910 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-trusted-ca\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400170 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400391 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"images\" (UniqueName: \"kubernetes.io/configmap/acda3264-e715-41ba-bd3f-843e23ecfd82-images\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400474 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b9431171-295f-4b2f-9a03-3325d993850c-metrics-tls\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400546 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7f576bdf-9ebc-46a8-8079-cb105274bba2-node-pullsecrets\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400579 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30b86d7c-a747-40c2-b76f-76eec0f9439c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400608 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/590dfde0-b415-4fb3-85c5-b8d5376617c8-tmpfs\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400641 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5-cert\") pod \"ingress-canary-crnml\" (UID: \"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5\") " pod="openshift-ingress-canary/ingress-canary-crnml"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400675 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-registration-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400705 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2wxzn\" (UniqueName: \"kubernetes.io/projected/acda3264-e715-41ba-bd3f-843e23ecfd82-kube-api-access-2wxzn\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400730 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-etcd-client\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400759 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-89fq5\" (UniqueName: \"kubernetes.io/projected/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-kube-api-access-89fq5\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400824 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cd14d895-97b2-4840-aa5b-ed942b6a89ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400866 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f576bdf-9ebc-46a8-8079-cb105274bba2-audit-dir\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.400970 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/7f576bdf-9ebc-46a8-8079-cb105274bba2-audit-dir\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.401960 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-trusted-ca-bundle\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.402009 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:36.901988451 +0000 UTC m=+143.917195985 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
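The MountDevice failure above is the key event in this stretch: the PVC is backed by the kubevirt.io.hostpath-provisioner CSI driver, but that driver's node plugin (the csi-hostpathplugin-sbswk pod whose own volumes are being mounted in the preceding entries) has not yet registered with this kubelet, so the volume manager cannot obtain a CSI client and backs off. A minimal sketch of how one might confirm which CSI drivers a node has registered, via the CSINode API with client-go; the node name "crc" matches this log, but the kubeconfig path is illustrative:

// Sketch: list the CSI drivers registered on a node.
package main

import (
	"context"
	"fmt"

	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Illustrative kubeconfig path; adjust for the target cluster.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client, err := kubernetes.NewForConfig(cfg)
	if err != nil {
		panic(err)
	}
	// A CSINode object records every CSI node plugin that has registered
	// with the kubelet on that node; the error above means
	// kubevirt.io.hostpath-provisioner is not yet in this list.
	csiNode, err := client.StorageV1().CSINodes().Get(context.TODO(), "crc", metav1.GetOptions{})
	if err != nil {
		panic(err)
	}
	for _, d := range csiNode.Spec.Drivers {
		fmt.Println(d.Name)
	}
}

Once the hostpath plugin pod starts and registers over the kubelet's plugin-registration socket, the driver appears in this list and the pending mount is expected to succeed on a later retry.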
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.402781 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"auth-proxy-config\" (UniqueName: \"kubernetes.io/configmap/acda3264-e715-41ba-bd3f-843e23ecfd82-auth-proxy-config\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.403145 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-config\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.403589 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registration-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-registration-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.403666 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-pullsecrets\" (UniqueName: \"kubernetes.io/host-path/7f576bdf-9ebc-46a8-8079-cb105274bba2-node-pullsecrets\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.403885 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-audit\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.405207 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"images\" (UniqueName: \"kubernetes.io/configmap/acda3264-e715-41ba-bd3f-843e23ecfd82-images\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.405321 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-serving-ca\" (UniqueName: \"kubernetes.io/configmap/7f576bdf-9ebc-46a8-8079-cb105274bba2-etcd-serving-ca\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.406895 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"tmpfs\" (UniqueName: \"kubernetes.io/empty-dir/590dfde0-b415-4fb3-85c5-b8d5376617c8-tmpfs\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.407582 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"srv-cert\" (UniqueName: \"kubernetes.io/secret/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-srv-cert\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.408253 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-bootstrap-token\" (UniqueName: \"kubernetes.io/secret/50bd392d-305c-4e83-9769-8313e7f6a514-node-bootstrap-token\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.408856 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"image-registry-operator-tls\" (UniqueName: \"kubernetes.io/secret/e1c02e39-f7b4-4949-ba76-132058e2587c-image-registry-operator-tls\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.408940 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-plugins-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.409192 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-serving-cert\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.409194 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/590dfde0-b415-4fb3-85c5-b8d5376617c8-webhook-cert\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.409249 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"csi-data-dir\" (UniqueName: \"kubernetes.io/host-path/45adf198-876a-4b6e-b75e-348e84079e86-csi-data-dir\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.410365 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-trusted-ca\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.410672 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/590dfde0-b415-4fb3-85c5-b8d5376617c8-apiservice-cert\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.411733 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/30b86d7c-a747-40c2-b76f-76eec0f9439c-serving-cert\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.411909 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5-cert\") pod \"ingress-canary-crnml\" (UID: \"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5\") " pod="openshift-ingress-canary/ingress-canary-crnml"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.412042 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/b9431171-295f-4b2f-9a03-3325d993850c-metrics-tls\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.413034 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"profile-collector-cert\" (UniqueName: \"kubernetes.io/secret/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-profile-collector-cert\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.414781 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"encryption-config\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-encryption-config\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.415026 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-certs\" (UniqueName: \"kubernetes.io/secret/ffa77803-cf30-4031-a0b2-c6ad1a66ffd7-webhook-certs\") pod \"multus-admission-controller-857f4d67dd-q4xdj\" (UID: \"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.415148 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etcd-client\" (UniqueName: \"kubernetes.io/secret/7f576bdf-9ebc-46a8-8079-cb105274bba2-etcd-client\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.415702 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-tls\" (UniqueName: \"kubernetes.io/secret/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-metrics-tls\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.416521 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gp74w\" (UniqueName: \"kubernetes.io/projected/71747bde-f844-47e2-aad8-abf178e2f5bd-kube-api-access-gp74w\") pod \"etcd-operator-b45778765-brgxl\" (UID: \"71747bde-f844-47e2-aad8-abf178e2f5bd\") " pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.416959 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-tls\" (UniqueName: \"kubernetes.io/secret/acda3264-e715-41ba-bd3f-843e23ecfd82-proxy-tls\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.417619 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"certs\" (UniqueName: \"kubernetes.io/secret/50bd392d-305c-4e83-9769-8313e7f6a514-certs\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.418002 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-serving-cert\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.418661 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cd14d895-97b2-4840-aa5b-ed942b6a89ce-serving-cert\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.442936 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-svx6f\" (UniqueName: \"kubernetes.io/projected/78d19e19-18ac-44d1-ac32-bfe5c2b58f8d-kube-api-access-svx6f\") pod \"router-default-5444994796-9rxcr\" (UID: \"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d\") " pod="openshift-ingress/router-default-5444994796-9rxcr"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.474113 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kn5kv\" (UniqueName: \"kubernetes.io/projected/e1d0254f-dbc4-42b0-a0cf-dc7532684f4a-kube-api-access-kn5kv\") pod \"machine-config-controller-84d6567774-4cth5\" (UID: \"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a\") " pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.480223 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xz4ld\" (UniqueName: \"kubernetes.io/projected/afc660f4-96eb-4013-b703-0967895a611b-kube-api-access-xz4ld\") pod \"collect-profiles-29500605-9spmm\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.494727 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.501518 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p68pn\" (UniqueName: \"kubernetes.io/projected/7141add7-a1ba-41ed-92e4-51a7684a90be-kube-api-access-p68pn\") pod \"package-server-manager-789f6589d5-x766n\" (UID: \"7141add7-a1ba-41ed-92e4-51a7684a90be\") " pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.502190 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.502468 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.002421303 +0000 UTC m=+144.017628867 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.502830 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.503512 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.003497432 +0000 UTC m=+144.018705126 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
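The repeated "No retries permitted until ... (durationBeforeRetry 500ms)" entries show the volume manager's per-volume backoff: after a failed mount or unmount, nestedpendingoperations refuses to run the same operation again until the deadline passes, and the delay grows on repeated failures. A minimal sketch of the same retry-with-growing-delay pattern using apimachinery's wait package; the backoff parameters below are illustrative, not necessarily the kubelet's exact values:

// Sketch: retry a failing operation with exponential backoff.
package main

import (
	"errors"
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

// mountDevice is a stand-in for the real operation; in this log it keeps
// failing until the CSI driver registers with the kubelet.
func mountDevice() error {
	return errors.New("driver name kubevirt.io.hostpath-provisioner not found")
}

func main() {
	backoff := wait.Backoff{
		Duration: 500 * time.Millisecond, // first durationBeforeRetry
		Factor:   2,                      // double the delay after each failure
		Steps:    5,                      // give up after five attempts
	}
	err := wait.ExponentialBackoff(backoff, func() (bool, error) {
		if e := mountDevice(); e != nil {
			fmt.Println("retrying:", e)
			return false, nil // not done yet; back off and try again
		}
		return true, nil
	})
	if err != nil {
		fmt.Println("gave up:", err)
	}
}

The design intent is the same as in the log: a transient dependency (driver registration) is expected to resolve on its own, so the kubelet retries rather than failing the pod outright.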
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.517740 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.517899 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-bound-sa-token\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.525423 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.539156 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-67hhn\" (UniqueName: \"kubernetes.io/projected/71a6d661-54a6-430a-902b-656503b8d12a-kube-api-access-67hhn\") pod \"control-plane-machine-set-operator-78cbb6b69f-z7cwj\" (UID: \"71a6d661-54a6-430a-902b-656503b8d12a\") " pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.550563 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.552771 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vtt9k\" (UniqueName: \"kubernetes.io/projected/31057271-37ac-4277-9ab0-3ef846dfb6dd-kube-api-access-vtt9k\") pod \"dns-operator-744455d44c-9jft7\" (UID: \"31057271-37ac-4277-9ab0-3ef846dfb6dd\") " pod="openshift-dns-operator/dns-operator-744455d44c-9jft7"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.565505 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.574598 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.576903 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dj8dl\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-kube-api-access-dj8dl\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.583201 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.595692 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.601587 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.603405 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.603502 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.103479121 +0000 UTC m=+144.118686675 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.603764 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.604172 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.104160569 +0000 UTC m=+144.119368103 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.614770 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress/router-default-5444994796-9rxcr"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.624048 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-49vn6\" (UniqueName: \"kubernetes.io/projected/73892a3f-54ec-47ac-9081-5d1a83088794-kube-api-access-49vn6\") pod \"migrator-59844c95c7-ksnn8\" (UID: \"73892a3f-54ec-47ac-9081-5d1a83088794\") " pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.638872 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/e1c02e39-f7b4-4949-ba76-132058e2587c-bound-sa-token\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.656954 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58dj8\" (UniqueName: \"kubernetes.io/projected/7acae8a8-1cb9-489a-b12a-397d8a5c68a2-kube-api-access-58dj8\") pod \"service-ca-operator-777779d784-427gh\" (UID: \"7acae8a8-1cb9-489a-b12a-397d8a5c68a2\") " pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.679473 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mzrdr\" (UniqueName: \"kubernetes.io/projected/ffa77803-cf30-4031-a0b2-c6ad1a66ffd7-kube-api-access-mzrdr\") pod \"multus-admission-controller-857f4d67dd-q4xdj\" (UID: \"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7\") " pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.684958 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" event={"ID":"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b","Type":"ContainerStarted","Data":"3fa481b70f74b7c27254045182aff3978ea1ded8c88949c60844f0ab3b0ee52a"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.685003 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" event={"ID":"9a03a3dc-2cf1-48b7-8b9a-f2718b6cae2b","Type":"ContainerStarted","Data":"380f8b1b9229d438da83e6c5623f40c3dd43c1e2d7447af01bedeecde000abdb"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.686476 4703 generic.go:334] "Generic (PLEG): container finished" podID="17eac293-e5de-49da-b77d-c511b866a958" containerID="ecbb1877c20c6d35fa752df896c0ba7871a7a379b3fd1d4d96684be68b36f262" exitCode=0
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.686530 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" event={"ID":"17eac293-e5de-49da-b77d-c511b866a958","Type":"ContainerDied","Data":"ecbb1877c20c6d35fa752df896c0ba7871a7a379b3fd1d4d96684be68b36f262"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.686561 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" event={"ID":"17eac293-e5de-49da-b77d-c511b866a958","Type":"ContainerStarted","Data":"96443de6418b6b8c07202f7cb1d8e44c9c07939406018f99a69b5fb181e308d9"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.697028 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wwmdc" event={"ID":"67d0ea85-5c1d-4420-afaa-0647a6c1eb93","Type":"ContainerStarted","Data":"053aa37849f0c5f7b518d66a4805047a02129caba4f4dffaf778fbb12cd0d9a9"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.697085 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wwmdc" event={"ID":"67d0ea85-5c1d-4420-afaa-0647a6c1eb93","Type":"ContainerStarted","Data":"d024127a0e4c5f5a3e416566620523ae50146e8876773e7c9ae34b332bda4dfa"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.697863 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-wwmdc"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.704646 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.705820 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.706539 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.707054 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.207040956 +0000 UTC m=+144.222248490 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Need to start a new one" pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.718843 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-n8qsw\" (UniqueName: \"kubernetes.io/projected/590dfde0-b415-4fb3-85c5-b8d5376617c8-kube-api-access-n8qsw\") pod \"packageserver-d55dfcdfc-56wp2\" (UID: \"590dfde0-b415-4fb3-85c5-b8d5376617c8\") " pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.721215 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" event={"ID":"d5ce7c42-1183-4e5d-8a51-4b817b40de14","Type":"ContainerStarted","Data":"4c8bbf2632f686014afc1006d1365087afc537f30abe6fa31a93b6123cd20863"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.721242 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" event={"ID":"d5ce7c42-1183-4e5d-8a51-4b817b40de14","Type":"ContainerStarted","Data":"420f3bdc61b3190d79860be2f8ab2a05687179b2427f49ffa57b81ecdb69cbc6"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.749797 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ss6w6\" (UniqueName: \"kubernetes.io/projected/c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5-kube-api-access-ss6w6\") pod \"ingress-canary-crnml\" (UID: \"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5\") " pod="openshift-ingress-canary/ingress-canary-crnml" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.758554 4703 generic.go:334] "Generic (PLEG): container finished" podID="a5941c46-e423-4c74-817a-1f08e831d439" containerID="305d1f2810c8c563e212393718f111226133c01c096ba8e1e51a704d72b9651f" exitCode=0 Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.758670 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" event={"ID":"a5941c46-e423-4c74-817a-1f08e831d439","Type":"ContainerDied","Data":"305d1f2810c8c563e212393718f111226133c01c096ba8e1e51a704d72b9651f"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.758700 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" event={"ID":"a5941c46-e423-4c74-817a-1f08e831d439","Type":"ContainerStarted","Data":"df607aca5f7d6a5a973fc0b6a06f782bbbd833c587b6a3fe075826d2bd1f6799"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.760390 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ldq6f\" (UniqueName: \"kubernetes.io/projected/50bd392d-305c-4e83-9769-8313e7f6a514-kube-api-access-ldq6f\") pod \"machine-config-server-29dfl\" (UID: \"50bd392d-305c-4e83-9769-8313e7f6a514\") " pod="openshift-machine-config-operator/machine-config-server-29dfl" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.766010 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xgsnt\" (UniqueName: \"kubernetes.io/projected/7f576bdf-9ebc-46a8-8079-cb105274bba2-kube-api-access-xgsnt\") pod \"apiserver-76f77b778f-lbjjm\" (UID: \"7f576bdf-9ebc-46a8-8079-cb105274bba2\") " pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.779725 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" 
event={"ID":"644a0236-d3fc-404d-a4da-203ca11b1316","Type":"ContainerStarted","Data":"7cb1aa05024f34b1618a4afe4a531706354a0af5e8dacb62af4b5cbec1b67e22"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.779764 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" event={"ID":"644a0236-d3fc-404d-a4da-203ca11b1316","Type":"ContainerStarted","Data":"b90e25cf06afe529b73104c71b9033bf89a6790aa2169ccbb5a40bd785f07785"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.780022 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.800190 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" event={"ID":"becc124e-6c1b-43c2-af98-0c6df242b0d9","Type":"ContainerStarted","Data":"9e6e445b6d8b31555ea77299ba0ca482e748ec69fbd4ff15f6da3f5ce45582d6"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.800256 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" event={"ID":"becc124e-6c1b-43c2-af98-0c6df242b0d9","Type":"ContainerStarted","Data":"d12af5b709ed36f12833ac48ceac65f19101f3100bfea5608459fed2b2279fe7"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.806716 4703 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7clns container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.806774 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.807954 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d8djz\" (UniqueName: \"kubernetes.io/projected/14a43a1a-4b8f-4365-ae50-57a2097ffd5c-kube-api-access-d8djz\") pod \"catalog-operator-68c6474976-p26zs\" (UID: \"14a43a1a-4b8f-4365-ae50-57a2097ffd5c\") " pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.808770 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.810653 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.310634382 +0000 UTC m=+144.325841916 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.813522 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/30b86d7c-a747-40c2-b76f-76eec0f9439c-kube-api-access\") pod \"kube-controller-manager-operator-78b949d7b-9vl8k\" (UID: \"30b86d7c-a747-40c2-b76f-76eec0f9439c\") " pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.820464 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" event={"ID":"3ffae680-60cc-4057-b1b5-f95418327d4d","Type":"ContainerStarted","Data":"01426fd638d55422f599d953fafbcc16b88e18cf100f974933d6680e029eb430"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.820509 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" event={"ID":"3ffae680-60cc-4057-b1b5-f95418327d4d","Type":"ContainerStarted","Data":"bca1db9e23be3955e6d4f0d5b7ad7f6d0d426d5f4c0fbeb1a7db22232a4cc8ad"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.834676 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99dbd\" (UniqueName: \"kubernetes.io/projected/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-kube-api-access-99dbd\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.835050 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.843711 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.854074 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/4030ecf3-bdf8-48ae-b509-bb7eace24ff9-bound-sa-token\") pod \"ingress-operator-5b745b69d9-czmb9\" (UID: \"4030ecf3-bdf8-48ae-b509-bb7eace24ff9\") " pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.890317 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j5z6x\" (UniqueName: \"kubernetes.io/projected/b9431171-295f-4b2f-9a03-3325d993850c-kube-api-access-j5z6x\") pod \"dns-default-ptg9n\" (UID: \"b9431171-295f-4b2f-9a03-3325d993850c\") " pod="openshift-dns/dns-default-ptg9n" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.904994 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" event={"ID":"a9a7821e-59f7-414f-bebc-9fab34813bbc","Type":"ContainerStarted","Data":"6541b5cdf5393985cbcb19eeb231e6712edca7bbc1af99ea9cad1777b33f473f"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.905070 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.905086 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" event={"ID":"a9a7821e-59f7-414f-bebc-9fab34813bbc","Type":"ContainerStarted","Data":"97c6c6e68f43964d0092778626602db8d35dd37a1cf164633ea1c56459e40939"} Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.911317 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7874x\" (UniqueName: \"kubernetes.io/projected/e1c02e39-f7b4-4949-ba76-132058e2587c-kube-api-access-7874x\") pod \"cluster-image-registry-operator-dc59b4c8b-8tg7z\" (UID: \"e1c02e39-f7b4-4949-ba76-132058e2587c\") " pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.911788 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:36 crc kubenswrapper[4703]: E0202 12:53:36.913643 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.413624422 +0000 UTC m=+144.428831956 (durationBeforeRetry 500ms). 
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.921657 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.924575 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" event={"ID":"f2f2ff94-e912-47d2-9bdb-d5d97b725715","Type":"ContainerStarted","Data":"48928420a00a88dddfe0d8af77fc5cc7df9019bc1dfee40ca5c64086a4a82fff"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.924646 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" event={"ID":"f2f2ff94-e912-47d2-9bdb-d5d97b725715","Type":"ContainerStarted","Data":"590b19aff17280edfc636be04c24dea5eba5d8838c00cf9aa42e1c87851738c8"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.930211 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.933370 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2wxzn\" (UniqueName: \"kubernetes.io/projected/acda3264-e715-41ba-bd3f-843e23ecfd82-kube-api-access-2wxzn\") pod \"machine-config-operator-74547568cd-vn68g\" (UID: \"acda3264-e715-41ba-bd3f-843e23ecfd82\") " pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.936965 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.942548 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-89fq5\" (UniqueName: \"kubernetes.io/projected/ef64d2ec-3586-41a5-a2fc-60f0ea764e6c-kube-api-access-89fq5\") pod \"kube-storage-version-migrator-operator-b67b599dd-z8msl\" (UID: \"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c\") " pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.945260 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/console-f9d7485db-bb2nk" event={"ID":"cb600eb2-3a8a-4303-b99b-a6c40cd9149e","Type":"ContainerStarted","Data":"5e5b3b90d557d098dd8abf33811e936738964d5a171b62c410dc1dcae720e3ac"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.946776 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.947942 4703 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-jhwld container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused" start-of-body=
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.947996 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": dial tcp 10.217.0.6:8443: connect: connection refused"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.948702 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pckbl\" (UniqueName: \"kubernetes.io/projected/45adf198-876a-4b6e-b75e-348e84079e86-kube-api-access-pckbl\") pod \"csi-hostpathplugin-sbswk\" (UID: \"45adf198-876a-4b6e-b75e-348e84079e86\") " pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.954983 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/cd14d895-97b2-4840-aa5b-ed942b6a89ce-kube-api-access\") pod \"openshift-kube-scheduler-operator-5fdd9b5758-2g7gn\" (UID: \"cd14d895-97b2-4840-aa5b-ed942b6a89ce\") " pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.955492 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" event={"ID":"335fb805-5368-4ab2-bdb6-d642aeb11902","Type":"ContainerStarted","Data":"d0cc126f1a087ff15a8488d30df9c42bd50e16388c3a21090cba319d9b1e30c2"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.955533 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" event={"ID":"335fb805-5368-4ab2-bdb6-d642aeb11902","Type":"ContainerStarted","Data":"04127f74a356fe37f02fc9930ea6cb817a3eee40965fd4d0606790e0d108b3f8"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.955547 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" event={"ID":"335fb805-5368-4ab2-bdb6-d642aeb11902","Type":"ContainerStarted","Data":"0cc52a75aa1ec57c6fb240663781f2e71d6fcfbf5a841c0d165cfb1c17394e1b"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.970838 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.971590 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" event={"ID":"e37ec3da-d2b7-4a09-a48b-747ba887fa26","Type":"ContainerStarted","Data":"bf1d1ae9625cbc1a08c1277f547959c6a55f1219e6c8a20f6f05e7ed7000d869"}
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.971740 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.978153 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"
Feb 02 12:53:36 crc kubenswrapper[4703]: I0202 12:53:36.994150 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.007302 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.013514 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-machine-config-operator/machine-config-server-29dfl"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.013638 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.015680 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.515662487 +0000 UTC m=+144.530870081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.025370 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.046198 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="hostpath-provisioner/csi-hostpathplugin-sbswk"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.049805 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ingress-canary/ingress-canary-crnml"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.119463 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.120873 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.620850216 +0000 UTC m=+144.636057750 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.208377 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.227184 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.227654 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.727633348 +0000 UTC m=+144.742840942 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.335506 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.336367 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.836348912 +0000 UTC m=+144.851556446 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
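[note] The repeated MountDevice and TearDown failures above share one cause: the kubelet hands CSI volume operations only to drivers that have registered over its plugin-registration socket, and kubevirt.io.hostpath-provisioner has not registered yet. Its driver pod, csi-hostpathplugin-sbswk, is itself only getting a sandbox in this window (see the "No sandbox for pod can be found" lines), so every mount and unmount attempt fails fast and is requeued until registration completes. A minimal way to check registration state, assuming cluster and node access (illustrative commands, not taken from this log):
    # drivers this node has registered with the kubelet
    kubectl get csinode crc -o yaml
    # CSIDriver objects known cluster-wide
    kubectl get csidriver
    # on the node itself: registration sockets the kubelet watches
    ls /var/lib/kubelet/plugins_registry/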
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.369818 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca/service-ca-9c57cc56f-4xm9p" podStartSLOduration=121.369795931 podStartE2EDuration="2m1.369795931s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:37.363603535 +0000 UTC m=+144.378811069" watchObservedRunningTime="2026-02-02 12:53:37.369795931 +0000 UTC m=+144.385003465"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.438988 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.439380 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:37.939365352 +0000 UTC m=+144.954572886 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.485249 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.542963 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.543485 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.043155323 +0000 UTC m=+145.058362867 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.543654 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.543934 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.043922784 +0000 UTC m=+145.059130318 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.555737 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff"]
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.560401 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-etcd-operator/etcd-operator-b45778765-brgxl"]
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.624576 4703 patch_prober.go:28] interesting pod/console-operator-58897d9998-2w9hx container/console-operator namespace/openshift-console-operator: Readiness probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok
Feb 02 12:53:37 crc kubenswrapper[4703]: [+]log ok
Feb 02 12:53:37 crc kubenswrapper[4703]: [-]poststarthook/max-in-flight-filter failed: reason withheld
Feb 02 12:53:37 crc kubenswrapper[4703]: [-]poststarthook/storage-object-count-tracker-hook failed: reason withheld
Feb 02 12:53:37 crc kubenswrapper[4703]: [+]shutdown ok
Feb 02 12:53:37 crc kubenswrapper[4703]: readyz check failed
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.624637 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" podUID="7837a007-6c6a-4856-8b9a-a8397e864ceb" containerName="console-operator" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.650193 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:37 crc kubenswrapper[4703]: W0202 12:53:37.650871 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71747bde_f844_47e2_aad8_abf178e2f5bd.slice/crio-53025a30d3c3cc855aece150fb29a57691a45ff44d84f32976aaeb38a68fc1f5 WatchSource:0}: Error finding container 53025a30d3c3cc855aece150fb29a57691a45ff44d84f32976aaeb38a68fc1f5: Status 404 returned error can't find the container with id 53025a30d3c3cc855aece150fb29a57691a45ff44d84f32976aaeb38a68fc1f5
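[note] Each nestedpendingoperations.go:348 entry is the volume manager's backoff gate: the failed operation is blocked for durationBeforeRetry (500ms here, the initial value; the kubelet's exponential backoff can stretch that delay for operations that keep failing), and the m=+144.x offsets are monotonic seconds since kubelet start. A rough way to pull the retry cadence out of a plain-text copy of this log, assuming GNU grep and a file named kubelet.log (sketch, names are assumptions):
    # count blocked retries, then list the scheduled retry times
    grep -c 'durationBeforeRetry' kubelet.log
    grep -o 'No retries permitted until [^(]*' kubelet.log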
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.651013 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.150992784 +0000 UTC m=+145.166200318 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.727179 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" podStartSLOduration=121.727155182 podStartE2EDuration="2m1.727155182s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:37.725104117 +0000 UTC m=+144.740311651" watchObservedRunningTime="2026-02-02 12:53:37.727155182 +0000 UTC m=+144.742362716"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.758548 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.758889 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.258874995 +0000 UTC m=+145.274082529 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.761888 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver-operator/openshift-apiserver-operator-796bbdcf4f-tft76" podStartSLOduration=122.761873926 podStartE2EDuration="2m2.761873926s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:37.759485642 +0000 UTC m=+144.774693186" watchObservedRunningTime="2026-02-02 12:53:37.761873926 +0000 UTC m=+144.777081460"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.788173 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rxr5v"]
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.802697 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/console-f9d7485db-bb2nk" podStartSLOduration=122.802680403 podStartE2EDuration="2m2.802680403s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:37.801666586 +0000 UTC m=+144.816874120" watchObservedRunningTime="2026-02-02 12:53:37.802680403 +0000 UTC m=+144.817887937"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.862358 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.862872 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.362851052 +0000 UTC m=+145.378058586 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.895537 4703 csr.go:261] certificate signing request csr-jws4b is approved, waiting to be issued
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.897596 4703 csr.go:257] certificate signing request csr-jws4b is issued
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.914611 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc"]
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.915540 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager-operator/openshift-controller-manager-operator-756b6f6bc6-q6hnb" podStartSLOduration=122.915522898 podStartE2EDuration="2m2.915522898s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:37.915395185 +0000 UTC m=+144.930602719" watchObservedRunningTime="2026-02-02 12:53:37.915522898 +0000 UTC m=+144.930730432"
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.918359 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-m25rz"]
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.922476 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5"]
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.964596 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:37 crc kubenswrapper[4703]: E0202 12:53:37.964935 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.464919547 +0000 UTC m=+145.480127081 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:37 crc kubenswrapper[4703]: I0202 12:53:37.997187 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console/downloads-7954f5f757-wwmdc" podStartSLOduration=122.997171084 podStartE2EDuration="2m2.997171084s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:37.996014213 +0000 UTC m=+145.011221757" watchObservedRunningTime="2026-02-02 12:53:37.997171084 +0000 UTC m=+145.012378618"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003512 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003569 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-9rxcr" event={"ID":"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d","Type":"ContainerStarted","Data":"fc22258c87f35a485319927d1294f4c121954068a18d67a473a605f61eae9c9b"}
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003590 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress/router-default-5444994796-9rxcr" event={"ID":"78d19e19-18ac-44d1-ac32-bfe5c2b58f8d","Type":"ContainerStarted","Data":"633d5f30c78e194370fdbdb4523183354a5a42a519008cf183806c1a78e39e2d"}
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003603 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" event={"ID":"71747bde-f844-47e2-aad8-abf178e2f5bd","Type":"ContainerStarted","Data":"53025a30d3c3cc855aece150fb29a57691a45ff44d84f32976aaeb38a68fc1f5"}
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003617 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" event={"ID":"17eac293-e5de-49da-b77d-c511b866a958","Type":"ContainerStarted","Data":"a9ee346b7d06a4efcfbadf957b9b083f370aff085b2f2ce70cd04cecc50e3b96"}
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003629 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-29dfl" event={"ID":"50bd392d-305c-4e83-9769-8313e7f6a514","Type":"ContainerStarted","Data":"f649f8a6ad8728de67a116c7810e1566e83da354dc8a37a4694059fc08738567"}
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.003640 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-server-29dfl" event={"ID":"50bd392d-305c-4e83-9769-8313e7f6a514","Type":"ContainerStarted","Data":"2d46134d57ee47f839c9af6701ab8b2ede907c76c2c245d3d8d3024e67b5be1a"}
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.006761 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" event={"ID":"65f8642a-46ce-4023-a4bb-168f8a68e3ec","Type":"ContainerStarted","Data":"a6afdd8163d440b2279cb67c0f66e967466f06f5ab282e1e0bef53eee476b276"}
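[note] The pod_startup_latency_tracker.go:104 lines are the kubelet's pod-startup SLI. podStartE2EDuration is simply observedRunningTime minus podCreationTimestamp (for downloads-7954f5f757-wwmdc above: 12:53:37.996 minus 12:51:35 is 122.997s, the logged 2m2.997171084s), and because firstStartedPulling/lastFinishedPulling are the zero time (no image pull was observed on this node), the SLO duration equals the end-to-end duration. A quick extraction over a plain-text copy of this log, assuming GNU grep and the file name kubelet.log (sketch):
    # one match per pod: name and startup SLO seconds
    grep -o 'pod="[^"]*" podStartSLOduration=[0-9.]*' kubelet.log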
event={"ID":"65f8642a-46ce-4023-a4bb-168f8a68e3ec","Type":"ContainerStarted","Data":"a6afdd8163d440b2279cb67c0f66e967466f06f5ab282e1e0bef53eee476b276"} Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.029947 4703 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7clns container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" start-of-body= Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.029997 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": dial tcp 10.217.0.5:8443: connect: connection refused" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.031968 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" event={"ID":"a5941c46-e423-4c74-817a-1f08e831d439","Type":"ContainerStarted","Data":"24b51be6080126e92b9fb777a477d4605a798860dc7cb9f160a0d4fa3d692802"} Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.039696 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.039770 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.070990 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.071423 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.571402151 +0000 UTC m=+145.586609685 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.108823 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.173040 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.174231 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.674215346 +0000 UTC m=+145.689422880 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.183011 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.187109 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication-operator/authentication-operator-69f744f599-lm8wl" podStartSLOduration=123.187087763 podStartE2EDuration="2m3.187087763s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.183683661 +0000 UTC m=+145.198891195" watchObservedRunningTime="2026-02-02 12:53:38.187087763 +0000 UTC m=+145.202295297" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.274942 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.275263 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.775236244 +0000 UTC m=+145.790443828 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.352536 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" podStartSLOduration=123.352182073 podStartE2EDuration="2m3.352182073s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.348765391 +0000 UTC m=+145.363972925" watchObservedRunningTime="2026-02-02 12:53:38.352182073 +0000 UTC m=+145.367389607" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.377140 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.377530 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.877517265 +0000 UTC m=+145.892724799 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.392052 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-console-operator/console-operator-58897d9998-2w9hx" podStartSLOduration=123.392038475 podStartE2EDuration="2m3.392038475s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.390701769 +0000 UTC m=+145.405909303" watchObservedRunningTime="2026-02-02 12:53:38.392038475 +0000 UTC m=+145.407246009" Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.478541 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.478976 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:38.978957863 +0000 UTC m=+145.994165397 (durationBeforeRetry 500ms). 
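[note] The probe failures in this window (route-controller-manager, controller-manager, downloads, console-operator, router) are all connection-refused or HTTP 500 responses from pod IPs in the 10.217.0.x range: the containers have started (the PLEG ContainerStarted events above) but their listeners are not serving yet, so the kubelet marks them unready and re-probes on the configured period until they pass. To see what a failing pod's probe actually checks, assuming kubectl access (illustrative command, pod name taken from the log):
    # dump the readiness probe spec of the failing pod
    kubectl -n openshift-route-controller-manager get pod route-controller-manager-6576b87f9c-jhwld -o jsonpath='{.spec.containers[0].readinessProbe}'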
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.561206 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-samples-operator/cluster-samples-operator-665b6dd947-rxz57" podStartSLOduration=123.561183364 podStartE2EDuration="2m3.561183364s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.559944681 +0000 UTC m=+145.575152215" watchObservedRunningTime="2026-02-02 12:53:38.561183364 +0000 UTC m=+145.576390898"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.582938 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.583248 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.083235027 +0000 UTC m=+146.098442551 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.618250 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-ingress/router-default-5444994796-9rxcr"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.620233 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused" start-of-body=
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.620274 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="Get \"http://localhost:1936/healthz/ready\": dial tcp [::1]:1936: connect: connection refused"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.669006 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/machine-api-operator-5694c8668f-bsrg2" podStartSLOduration=122.668974564 podStartE2EDuration="2m2.668974564s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.66660287 +0000 UTC m=+145.681810404" watchObservedRunningTime="2026-02-02 12:53:38.668974564 +0000 UTC m=+145.684182098"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.687150 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.688439 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.188418146 +0000 UTC m=+146.203625680 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.723289 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-cluster-machine-approver/machine-approver-56656f9798-dmmgt" podStartSLOduration=123.723250623 podStartE2EDuration="2m3.723250623s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.720939371 +0000 UTC m=+145.736146905" watchObservedRunningTime="2026-02-02 12:53:38.723250623 +0000 UTC m=+145.738458167"
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.790653 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.791201 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.29118764 +0000 UTC m=+146.306395174 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.903827 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Certificate expiration is 2027-02-02 12:48:37 +0000 UTC, rotation deadline is 2026-10-16 15:01:06.710824092 +0000 UTC
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.903884 4703 certificate_manager.go:356] kubernetes.io/kubelet-serving: Waiting 6146h7m27.806942411s for next certificate rotation
Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.908174 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.408139046 +0000 UTC m=+146.423346580 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.904651 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.913787 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:38 crc kubenswrapper[4703]: E0202 12:53:38.914243 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.4142298 +0000 UTC m=+146.429437334 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:38 crc kubenswrapper[4703]: I0202 12:53:38.984430 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-service-ca-operator/service-ca-operator-777779d784-427gh"]
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.016301 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress/router-default-5444994796-9rxcr" podStartSLOduration=124.016256904 podStartE2EDuration="2m4.016256904s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:38.983932104 +0000 UTC m=+145.999139638" watchObservedRunningTime="2026-02-02 12:53:39.016256904 +0000 UTC m=+146.031464438"
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.021313 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.024332 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.52431179 +0000 UTC m=+146.539519314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
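[note] The certificate_manager.go:356 pair above is routine kubelet-serving certificate rotation, not an error: the certificate expires 2027-02-02 12:48:37 and the manager picked a jittered rotation deadline of 2026-10-16 15:01:06, which from the log's current time (2026-02-02 12:53:38) is 256 days plus roughly 2h07m, i.e. the logged 6146h7m27s wait. The interval can be sanity-checked, assuming GNU date (sketch):
    # hours from the log timestamp to the rotation deadline
    echo $(( ($(date -ud '2026-10-16 15:01:06' +%s) - $(date -ud '2026-02-02 12:53:38' +%s)) / 3600 ))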
nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.52431179 +0000 UTC m=+146.539519314 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.025984 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.026381 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.526368306 +0000 UTC m=+146.541575840 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.028729 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.071709 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" podStartSLOduration=123.071694545 podStartE2EDuration="2m3.071694545s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.069073334 +0000 UTC m=+146.084280868" watchObservedRunningTime="2026-02-02 12:53:39.071694545 +0000 UTC m=+146.086902079" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.098602 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" event={"ID":"3b55617e-e44e-48c2-890a-2c2becd1ebcd","Type":"ContainerStarted","Data":"5ee7d645149af93919c9c6afe541e7c5bff04fb7d612616e2c3f133658eb7d90"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.098651 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" event={"ID":"3b55617e-e44e-48c2-890a-2c2becd1ebcd","Type":"ContainerStarted","Data":"d51028b42d620180e4369a62ce89a37441b2190ea4736679fa582f5b13a91bc2"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.112207 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" 
podStartSLOduration=124.112190584 podStartE2EDuration="2m4.112190584s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.110283973 +0000 UTC m=+146.125491507" watchObservedRunningTime="2026-02-02 12:53:39.112190584 +0000 UTC m=+146.127398118" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.120702 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" event={"ID":"65f8642a-46ce-4023-a4bb-168f8a68e3ec","Type":"ContainerStarted","Data":"6116f9775fdfe378a732679c2fa5cd2ab6cdcf6da086d751ac518b83e76c7159"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.121664 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.127675 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.127983 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.627951258 +0000 UTC m=+146.643158812 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.128216 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.128578 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.628569235 +0000 UTC m=+146.643776769 (durationBeforeRetry 500ms). 
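[note] Each "SyncLoop (PLEG)" line is the pod lifecycle event generator relaying a CRI state change into the kubelet sync loop. When a single pod reports two ContainerStarted Data IDs back to back (kube-apiserver-operator above), one ID is typically the application container and the other the pod sandbox, since PLEG emits events for both. The 64-hex IDs can be resolved on the node with crictl, assuming node access (illustrative ID prefix taken from the log):
    # map a CRI container ID from the log back to its pod
    crictl ps -a | grep 5ee7d645
    # list pod sandboxes with their IDs
    crictl pods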
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.140526 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" event={"ID":"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a","Type":"ContainerStarted","Data":"94e7cc71bf0d3d1432a85e466847960d1e7d91e39fcc4078f73de0e2c426ac9e"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.140567 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" event={"ID":"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a","Type":"ContainerStarted","Data":"eb3eb7bf7fa5ae48d5bafb9cb885292777faf9b7a9fec7283f70867d0e57cb21"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.141398 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" event={"ID":"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d","Type":"ContainerStarted","Data":"183546ba7d2be3961f39aec48156b93ad70e1f3aa707d1dce8f581fd308401c5"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.141416 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" event={"ID":"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d","Type":"ContainerStarted","Data":"c8f4804fd124d3d6cc3b7e593a946a7367770d264189d44ed191912849ef4d5e"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.142667 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.148598 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" event={"ID":"71747bde-f844-47e2-aad8-abf178e2f5bd","Type":"ContainerStarted","Data":"e55d82c66d70053b807c67309a53b62cb0f243e10ced086a83c27d13cb591023"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.170350 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" event={"ID":"721e0407-77c9-416a-9297-6074e75d0a6e","Type":"ContainerStarted","Data":"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.170407 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" event={"ID":"721e0407-77c9-416a-9297-6074e75d0a6e","Type":"ContainerStarted","Data":"7b024c2d228fa87093e930574c08a23fb32aa44e45ccf9639e0482ec17f14a4e"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.173895 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.173938 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" 
podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.174247 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" event={"ID":"7acae8a8-1cb9-489a-b12a-397d8a5c68a2","Type":"ContainerStarted","Data":"8ac3038f063b994e3a2b255eba49e58c5f2bb2187f16ae75a78768d1e0c613a4"} Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.174289 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.178479 4703 patch_prober.go:28] interesting pod/olm-operator-6b444d44fb-pg7ff container/olm-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.15:8443/healthz\": dial tcp 10.217.0.15:8443: connect: connection refused" start-of-body= Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.178505 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" podUID="65f8642a-46ce-4023-a4bb-168f8a68e3ec" containerName="olm-operator" probeResult="failure" output="Get \"https://10.217.0.15:8443/healthz\": dial tcp 10.217.0.15:8443: connect: connection refused" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.180788 4703 patch_prober.go:28] interesting pod/openshift-config-operator-7777fb866f-7dn5j container/openshift-config-operator namespace/openshift-config-operator: Readiness probe status=failure output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" start-of-body= Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.180809 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j" podUID="17eac293-e5de-49da-b77d-c511b866a958" containerName="openshift-config-operator" probeResult="failure" output="Get \"https://10.217.0.11:8443/healthz\": dial tcp 10.217.0.11:8443: connect: connection refused" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.198105 4703 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rxr5v container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.198165 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.205111 4703 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-m25rz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body= Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.205172 4703 prober.go:107] "Probe failed" probeType="Readiness" 
pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.228067 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.229875 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.234495 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.734471893 +0000 UTC m=+146.749679477 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.279535 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-server-29dfl" podStartSLOduration=6.279513014 podStartE2EDuration="6.279513014s" podCreationTimestamp="2026-02-02 12:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.271820838 +0000 UTC m=+146.287028392" watchObservedRunningTime="2026-02-02 12:53:39.279513014 +0000 UTC m=+146.294720548" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.331161 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" podStartSLOduration=123.331134073 podStartE2EDuration="2m3.331134073s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.308050102 +0000 UTC m=+146.323257656" watchObservedRunningTime="2026-02-02 12:53:39.331134073 +0000 UTC m=+146.346341617" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.332822 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.333071 4703 nestedpendingoperations.go:348] Operation for 
"{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.833060755 +0000 UTC m=+146.848268289 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.336832 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["hostpath-provisioner/csi-hostpathplugin-sbswk"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.355477 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-etcd-operator/etcd-operator-b45778765-brgxl" podStartSLOduration=124.355458887 podStartE2EDuration="2m4.355458887s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.353255688 +0000 UTC m=+146.368463212" watchObservedRunningTime="2026-02-02 12:53:39.355458887 +0000 UTC m=+146.370666421" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.396366 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.398675 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/multus-admission-controller-857f4d67dd-q4xdj"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.433627 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.434474 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:39.934453862 +0000 UTC m=+146.949661396 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.459904 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" podStartSLOduration=124.459886306 podStartE2EDuration="2m4.459886306s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.418742339 +0000 UTC m=+146.433949873" watchObservedRunningTime="2026-02-02 12:53:39.459886306 +0000 UTC m=+146.475093830" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.484216 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver-operator/kube-apiserver-operator-766d6c64bb-llfrc" podStartSLOduration=124.48420195 podStartE2EDuration="2m4.48420195s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.461131809 +0000 UTC m=+146.476339343" watchObservedRunningTime="2026-02-02 12:53:39.48420195 +0000 UTC m=+146.499409484" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.506551 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.506597 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.537374 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.537809 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.037790461 +0000 UTC m=+147.052997995 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.566508 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" podStartSLOduration=123.566491173 podStartE2EDuration="2m3.566491173s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:39.540423872 +0000 UTC m=+146.555631406" watchObservedRunningTime="2026-02-02 12:53:39.566491173 +0000 UTC m=+146.581698707" Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.575038 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns-operator/dns-operator-744455d44c-9jft7"] Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.648807 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.649165 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.149148756 +0000 UTC m=+147.164356290 (durationBeforeRetry 500ms). 
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.654042 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 12:53:39 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld
Feb 02 12:53:39 crc kubenswrapper[4703]: [+]process-running ok
Feb 02 12:53:39 crc kubenswrapper[4703]: healthz check failed
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.654103 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.685401 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g"]
Feb 02 12:53:39 crc kubenswrapper[4703]: W0202 12:53:39.717841 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podacda3264_e715_41ba_bd3f_843e23ecfd82.slice/crio-79e0b2aa7f05bd0dc666b40d3b23c90ec1e9eab4a7f702b15ed02c349033bfe0 WatchSource:0}: Error finding container 79e0b2aa7f05bd0dc666b40d3b23c90ec1e9eab4a7f702b15ed02c349033bfe0: Status 404 returned error can't find the container with id 79e0b2aa7f05bd0dc666b40d3b23c90ec1e9eab4a7f702b15ed02c349033bfe0
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.721054 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"]
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.750206 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.750592 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.250576464 +0000 UTC m=+147.265783998 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.809324 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z"]
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.838632 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl"]
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.855775 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.856070 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.356053991 +0000 UTC m=+147.371261515 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.922504 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-apiserver/apiserver-76f77b778f-lbjjm"]
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.923474 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k"]
Feb 02 12:53:39 crc kubenswrapper[4703]: I0202 12:53:39.959872 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:39 crc kubenswrapper[4703]: E0202 12:53:39.960304 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.460258944 +0000 UTC m=+147.475466478 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.001695 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9"]
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.059879 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn"]
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.069901 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.070308 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.570290083 +0000 UTC m=+147.585497617 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:40 crc kubenswrapper[4703]: W0202 12:53:40.075801 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod7f576bdf_9ebc_46a8_8079_cb105274bba2.slice/crio-d6ef41ccd5212e70f39d08dca408f3e1c78e34b6ecdbfd73ab7a7b50b0341e67 WatchSource:0}: Error finding container d6ef41ccd5212e70f39d08dca408f3e1c78e34b6ecdbfd73ab7a7b50b0341e67: Status 404 returned error can't find the container with id d6ef41ccd5212e70f39d08dca408f3e1c78e34b6ecdbfd73ab7a7b50b0341e67
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.094206 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-ingress-canary/ingress-canary-crnml"]
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.170911 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.171521 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.671504585 +0000 UTC m=+147.686712119 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.192393 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-dns/dns-default-ptg9n"]
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.271777 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.272261 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.772244045 +0000 UTC m=+147.787451579 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.281900 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" event={"ID":"e1c02e39-f7b4-4949-ba76-132058e2587c","Type":"ContainerStarted","Data":"b367e1e7894dcc3d30f7f2ffab27239a892418684ef3d9c0a4b3007a9c1a31c2"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.282912 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph"
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.283076 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph"
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.296094 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" event={"ID":"590dfde0-b415-4fb3-85c5-b8d5376617c8","Type":"ContainerStarted","Data":"d9203d89d576696e3d1d46cf60a9ca13c4a615a754af41ba2152290f34116740"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.318691 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" event={"ID":"afc660f4-96eb-4013-b703-0967895a611b","Type":"ContainerStarted","Data":"a0a72e0182b9a249207dc0121dc975f347391e823f261fa2c90532227bef7d1a"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.318748 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" event={"ID":"afc660f4-96eb-4013-b703-0967895a611b","Type":"ContainerStarted","Data":"9e822584faa754b4b209409c711249e85bd5561afb3a1c1882291e8df77859a9"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.339971 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" event={"ID":"73892a3f-54ec-47ac-9081-5d1a83088794","Type":"ContainerStarted","Data":"cd641d7f1a4fd351accb89986abd13d443c5d99ee19fdd05bffdc26fe963e37e"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.340014 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" event={"ID":"73892a3f-54ec-47ac-9081-5d1a83088794","Type":"ContainerStarted","Data":"cad3b0809ef3bdfbb10b12fe2b03e5e4f5fb173f45e03c9aa017e4e934b095cf"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.346991 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" event={"ID":"45adf198-876a-4b6e-b75e-348e84079e86","Type":"ContainerStarted","Data":"c00b745acf8eb8d47b88fedcc15e33e9ad2eab780df4a68cd5ba8ef316b6839a"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.362643 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" event={"ID":"7f576bdf-9ebc-46a8-8079-cb105274bba2","Type":"ContainerStarted","Data":"d6ef41ccd5212e70f39d08dca408f3e1c78e34b6ecdbfd73ab7a7b50b0341e67"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.373502 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.374255 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.874234888 +0000 UTC m=+147.889442482 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.395590 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" event={"ID":"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c","Type":"ContainerStarted","Data":"07a1027d470d4b6b3fd636411deb4a8ba37c63058c202e92f673e54c0a03ed9f"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.411115 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" event={"ID":"7141add7-a1ba-41ed-92e4-51a7684a90be","Type":"ContainerStarted","Data":"5644a8e6277527442a3e972f642d970765409be33641344d4987c599684a9208"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.411171 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" event={"ID":"7141add7-a1ba-41ed-92e4-51a7684a90be","Type":"ContainerStarted","Data":"3bd74ab7b67802fb8c5f886370edd7633d295b9205382d3710992ca01f9cbaad"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.411185 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" event={"ID":"7141add7-a1ba-41ed-92e4-51a7684a90be","Type":"ContainerStarted","Data":"b58f20538c68dda2270edcbcde16d307581808ad9f767ea91f5407531f4e2638"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.411610 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n"
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.414559 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" event={"ID":"acda3264-e715-41ba-bd3f-843e23ecfd82","Type":"ContainerStarted","Data":"79e0b2aa7f05bd0dc666b40d3b23c90ec1e9eab4a7f702b15ed02c349033bfe0"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.425948 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" event={"ID":"e1d0254f-dbc4-42b0-a0cf-dc7532684f4a","Type":"ContainerStarted","Data":"70748e061b015a03af603f170d354706009ec772c577aec143abf0d6ca7331f4"}
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.435956 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" podStartSLOduration=125.435939797 podStartE2EDuration="2m5.435939797s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:40.435390493 +0000 UTC m=+147.450598027" watchObservedRunningTime="2026-02-02 12:53:40.435939797 +0000 UTC m=+147.451147331"
Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.440141 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" event={"ID":"71a6d661-54a6-430a-902b-656503b8d12a","Type":"ContainerStarted","Data":"c216011620716438fad8e19c8111ec16ea4019a10d5d92dfca4b7ddb3ecba538"}
pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" event={"ID":"71a6d661-54a6-430a-902b-656503b8d12a","Type":"ContainerStarted","Data":"c216011620716438fad8e19c8111ec16ea4019a10d5d92dfca4b7ddb3ecba538"} Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.440192 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" event={"ID":"71a6d661-54a6-430a-902b-656503b8d12a","Type":"ContainerStarted","Data":"a17ec3dff2b77a997d11313394498fd20debac7c0785a4e220885241032df444"} Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.464869 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" event={"ID":"14a43a1a-4b8f-4365-ae50-57a2097ffd5c","Type":"ContainerStarted","Data":"cee49d0458f69db57bf209dafa99d63c5ea2e0494903d78a5f82a72424c7432d"} Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.465518 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.472672 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" event={"ID":"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7","Type":"ContainerStarted","Data":"288543982dac5f625cc326f4aa511f47e057422196b5545d3e7d1d2fb6d42a55"} Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.476341 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.476480 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.976450067 +0000 UTC m=+147.991657611 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.476707 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.478170 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:40.978106571 +0000 UTC m=+147.993314175 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.482591 4703 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-p26zs container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body= Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.482635 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" podUID="14a43a1a-4b8f-4365-ae50-57a2097ffd5c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.486600 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" event={"ID":"7acae8a8-1cb9-489a-b12a-397d8a5c68a2","Type":"ContainerStarted","Data":"0a3a44625bb17207934b99e5d170d39f0ccc3737018e444bb72682da759cc75b"} Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.514801 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" event={"ID":"31057271-37ac-4277-9ab0-3ef846dfb6dd","Type":"ContainerStarted","Data":"34def3fb52c57c867c4a3643d62f5f9b5c8b42f1e121edc87039e92bb7f1d669"} Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.516574 4703 patch_prober.go:28] interesting pod/oauth-openshift-558db77b4-m25rz container/oauth-openshift namespace/openshift-authentication: Readiness probe status=failure output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" start-of-body= Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.516619 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" containerName="oauth-openshift" probeResult="failure" output="Get \"https://10.217.0.23:6443/healthz\": dial tcp 10.217.0.23:6443: connect: connection refused" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.517011 4703 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rxr5v container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body= Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.517039 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.526575 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-machine-config-operator/machine-config-controller-84d6567774-4cth5" podStartSLOduration=124.526554875 podStartE2EDuration="2m4.526554875s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:40.525765293 +0000 UTC m=+147.540972827" watchObservedRunningTime="2026-02-02 12:53:40.526554875 +0000 UTC m=+147.541762409" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.542400 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/olm-operator-6b444d44fb-pg7ff" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.577768 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.579307 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.079291463 +0000 UTC m=+148.094498987 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.619603 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" podStartSLOduration=124.619585757 podStartE2EDuration="2m4.619585757s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:40.615133327 +0000 UTC m=+147.630340861" watchObservedRunningTime="2026-02-02 12:53:40.619585757 +0000 UTC m=+147.634793291" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.643796 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:40 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:40 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:40 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.643854 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.668412 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" 
pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.679662 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.680085 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.180071983 +0000 UTC m=+148.195279507 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.703575 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-service-ca-operator/service-ca-operator-777779d784-427gh" podStartSLOduration=124.703561445 podStartE2EDuration="2m4.703561445s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:40.703509734 +0000 UTC m=+147.718717278" watchObservedRunningTime="2026-02-02 12:53:40.703561445 +0000 UTC m=+147.718768979" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.761448 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" podStartSLOduration=124.761433632 podStartE2EDuration="2m4.761433632s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:40.760446695 +0000 UTC m=+147.775654229" watchObservedRunningTime="2026-02-02 12:53:40.761433632 +0000 UTC m=+147.776641156" Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.790749 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.791126 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.29111141 +0000 UTC m=+148.306318944 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.902053 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:40 crc kubenswrapper[4703]: E0202 12:53:40.902617 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.402606549 +0000 UTC m=+148.417814083 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:40 crc kubenswrapper[4703]: I0202 12:53:40.921681 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-api/control-plane-machine-set-operator-78cbb6b69f-z7cwj" podStartSLOduration=124.921667362 podStartE2EDuration="2m4.921667362s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:40.847700772 +0000 UTC m=+147.862908306" watchObservedRunningTime="2026-02-02 12:53:40.921667362 +0000 UTC m=+147.936874896" Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.004331 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.004460 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.504439927 +0000 UTC m=+148.519647471 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.004812 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.005167 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.505158996 +0000 UTC m=+148.520366530 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.118842 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.119234 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.619215514 +0000 UTC m=+148.634423048 (durationBeforeRetry 500ms). 
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.220642 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.221000 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.720984541 +0000 UTC m=+148.736192075 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.322798 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.323562 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.823542469 +0000 UTC m=+148.838750003 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.396806 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-config-operator/openshift-config-operator-7777fb866f-7dn5j"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.429344 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.429632 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:41.929619722 +0000 UTC m=+148.944827256 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.529940 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.530404 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.030385073 +0000 UTC m=+149.045592607 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.587815 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" event={"ID":"4030ecf3-bdf8-48ae-b509-bb7eace24ff9","Type":"ContainerStarted","Data":"6c1856cfec2cc68fd391a620db63ad2ee941f3a5edd63a62c9f636f5995d56ce"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.587873 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" event={"ID":"4030ecf3-bdf8-48ae-b509-bb7eace24ff9","Type":"ContainerStarted","Data":"2a86b3c08c89f34ea52e956c2ecbfdbd844dda4ea5dd2723763cbfb3907d9fce"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.632939 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.633516 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.133499086 +0000 UTC m=+149.148706620 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.640833 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:41 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:41 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:41 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.640885 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.641116 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" event={"ID":"31057271-37ac-4277-9ab0-3ef846dfb6dd","Type":"ContainerStarted","Data":"0a13d6a57d10cbf6bbe076e132f663773c3a2e502b66ba5ac253e83c351faf80"} Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.659084 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" event={"ID":"e1c02e39-f7b4-4949-ba76-132058e2587c","Type":"ContainerStarted","Data":"1eb7d559023b49c52ce758cc52a1762dcdab58ffd63efdff92d0a5b2ab89b119"} Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.672970 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" event={"ID":"ef64d2ec-3586-41a5-a2fc-60f0ea764e6c","Type":"ContainerStarted","Data":"984546a773ee7bb56ff39488e4f168a2162c18f876a68a336c3614167f27c1ac"} Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.711570 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" event={"ID":"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7","Type":"ContainerStarted","Data":"caaa974a57177ccdca89db4ac5757f8d2f1df614f4b4517e2dcad6f390f576b0"} Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.736079 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.737673 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.237652738 +0000 UTC m=+149.252860292 (durationBeforeRetry 500ms). 
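The router startup-probe output above follows the common aggregated-healthz convention: each named subcheck reports [+] or [-] ([-]backend-http and [-]has-synced are failing while [+]process-running is ok), and any failing subcheck turns the endpoint into an HTTP 500, which is what the prober records. An illustrative sketch of that pattern in plain Go (not the actual openshift-router code):

```go
// Aggregated healthz: run named subchecks, emit [+]/[-] lines, and return
// HTTP 500 if any subcheck fails, mirroring the probe output in the log.
package main

import (
	"fmt"
	"net/http"
)

type check struct {
	name string
	fn   func() error
}

func healthz(checks []check) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		failed := false
		body := ""
		for _, c := range checks {
			if err := c.fn(); err != nil {
				failed = true
				body += fmt.Sprintf("[-]%s failed: reason withheld\n", c.name)
			} else {
				body += fmt.Sprintf("[+]%s ok\n", c.name)
			}
		}
		if failed {
			w.WriteHeader(http.StatusInternalServerError) // probe sees "statuscode: 500"
			body += "healthz check failed\n"
		}
		fmt.Fprint(w, body)
	}
}

func main() {
	http.Handle("/healthz", healthz([]check{
		{"backend-http", func() error { return fmt.Errorf("not synced") }},
		{"process-running", func() error { return nil }},
	}))
	http.ListenAndServe(":8080", nil)
}
```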
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.745991 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/cluster-image-registry-operator-dc59b4c8b-8tg7z" podStartSLOduration=126.745968741 podStartE2EDuration="2m6.745968741s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:41.707689432 +0000 UTC m=+148.722896976" watchObservedRunningTime="2026-02-02 12:53:41.745968741 +0000 UTC m=+148.761176275"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.747556 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" event={"ID":"30b86d7c-a747-40c2-b76f-76eec0f9439c","Type":"ContainerStarted","Data":"b418d6f3720b13375fc4b6ec766f2be6cab4843b04a0dc0736962e011f113795"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.765950 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" event={"ID":"acda3264-e715-41ba-bd3f-843e23ecfd82","Type":"ContainerStarted","Data":"988301e613a18db1755980208cc5968a70491e277d2428cb8f0dad0643c1d888"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.765991 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" event={"ID":"acda3264-e715-41ba-bd3f-843e23ecfd82","Type":"ContainerStarted","Data":"ca4caaec8e7f17f94fb799b00ee681cb6b759d526d7e68c9f41490635bd6f352"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.777527 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" event={"ID":"14a43a1a-4b8f-4365-ae50-57a2097ffd5c","Type":"ContainerStarted","Data":"98ade2bcee69484aa29fbdcce4e4b7cc13463193013479d7c3d15b90c594ee4a"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.778575 4703 patch_prober.go:28] interesting pod/catalog-operator-68c6474976-p26zs container/catalog-operator namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused" start-of-body=
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.778609 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs" podUID="14a43a1a-4b8f-4365-ae50-57a2097ffd5c" containerName="catalog-operator" probeResult="failure" output="Get \"https://10.217.0.24:8443/healthz\": dial tcp 10.217.0.24:8443: connect: connection refused"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.788913 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" event={"ID":"73892a3f-54ec-47ac-9081-5d1a83088794","Type":"ContainerStarted","Data":"b8ac099c79f5da7a6f1b18f26b8607cb88f932a4e2f576ccfef92e8d7a146175"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.793524 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ptg9n" event={"ID":"b9431171-295f-4b2f-9a03-3325d993850c","Type":"ContainerStarted","Data":"72009710d666e04d9c14085c1366ded5636eb91f858984cdc7dd4efc71aa74c5"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.800658 4703 generic.go:334] "Generic (PLEG): container finished" podID="7f576bdf-9ebc-46a8-8079-cb105274bba2" containerID="cb5292bf31d4c05d546517aca49beb4323b5234e168b1204ea01e0e671a649f7" exitCode=0
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.800745 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" event={"ID":"7f576bdf-9ebc-46a8-8079-cb105274bba2","Type":"ContainerDied","Data":"cb5292bf31d4c05d546517aca49beb4323b5234e168b1204ea01e0e671a649f7"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.805865 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" event={"ID":"cd14d895-97b2-4840-aa5b-ed942b6a89ce","Type":"ContainerStarted","Data":"4e02992741e392b8f524b2d10d482da0bbcc075f210f9516b5ffa3e302b9982b"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.808565 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" event={"ID":"590dfde0-b415-4fb3-85c5-b8d5376617c8","Type":"ContainerStarted","Data":"12a222a035a9b6b2b6b9ea78c2213cc1735ceec4e8470e50992c84d88669c9b5"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.809010 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.812999 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-crnml" event={"ID":"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5","Type":"ContainerStarted","Data":"04d0f8f739e122213747b455dd88423ff066cac974fe7613432a1e1e261ebf2a"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.813051 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-canary/ingress-canary-crnml" event={"ID":"c8fc95ca-6db4-4c2c-94a1-5e420e4a43d5","Type":"ContainerStarted","Data":"55858e4c3399c97e9b500cd998011a19fd8146745ff750f348c08f00e4ef4adc"}
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.813526 4703 patch_prober.go:28] interesting pod/marketplace-operator-79b997595-rxr5v container/marketplace-operator namespace/openshift-marketplace: Readiness probe status=failure output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused" start-of-body=
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.813566 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" probeResult="failure" output="Get \"http://10.217.0.17:8080/healthz\": dial tcp 10.217.0.17:8080: connect: connection refused"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.821145 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-oauth-apiserver/apiserver-7bbb656c7d-hh7ph"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.822352 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator-operator/kube-storage-version-migrator-operator-b67b599dd-z8msl" podStartSLOduration=125.822263634 podStartE2EDuration="2m5.822263634s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:41.747877213 +0000 UTC m=+148.763084757" watchObservedRunningTime="2026-02-02 12:53:41.822263634 +0000 UTC m=+148.837471168"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.822964 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-storage-version-migrator/migrator-59844c95c7-ksnn8" podStartSLOduration=125.822960032 podStartE2EDuration="2m5.822960032s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:41.821256066 +0000 UTC m=+148.836463600" watchObservedRunningTime="2026-02-02 12:53:41.822960032 +0000 UTC m=+148.838167556"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.823839 4703 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-56wp2 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body=
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.823877 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" podUID="590dfde0-b415-4fb3-85c5-b8d5376617c8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused"
Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.841892 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.842308 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.342293252 +0000 UTC m=+149.357500786 (durationBeforeRetry 500ms).
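The pod_startup_latency_tracker entries above can be sanity-checked by hand: podStartE2EDuration is observedRunningTime minus podCreationTimestamp, and podStartSLOduration matches it here because both image-pull timestamps are the zero epoch, so there is no pull time to exclude. A quick check of the z8msl numbers:

```go
// Verify the reported startup duration: 12:53:41.822263634 - 12:51:36
// should print 2m5.822263634s, matching podStartE2EDuration above.
package main

import (
	"fmt"
	"time"
)

func main() {
	created, _ := time.Parse(time.RFC3339, "2026-02-02T12:51:36Z")
	running, _ := time.Parse(time.RFC3339, "2026-02-02T12:53:41.822263634Z")
	fmt.Println(running.Sub(created)) // 2m5.822263634s
}
```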
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:41 crc kubenswrapper[4703]: I0202 12:53:41.943675 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:41 crc kubenswrapper[4703]: E0202 12:53:41.945338 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.445315083 +0000 UTC m=+149.460522687 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.047575 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.048424 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.548407656 +0000 UTC m=+149.563615190 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.149201 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.149668 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.649650389 +0000 UTC m=+149.664857923 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.234759 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.250568 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.250915 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.750902642 +0000 UTC m=+149.766110176 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.263739 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" podStartSLOduration=126.263721597 podStartE2EDuration="2m6.263721597s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:42.263435029 +0000 UTC m=+149.278642573" watchObservedRunningTime="2026-02-02 12:53:42.263721597 +0000 UTC m=+149.278929131" Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.264524 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-canary/ingress-canary-crnml" podStartSLOduration=9.264516428 podStartE2EDuration="9.264516428s" podCreationTimestamp="2026-02-02 12:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:42.109789447 +0000 UTC m=+149.124996981" watchObservedRunningTime="2026-02-02 12:53:42.264516428 +0000 UTC m=+149.279723972" Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.351359 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.351819 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.851798486 +0000 UTC m=+149.867006020 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.454137 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.454486 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:42.954469697 +0000 UTC m=+149.969677231 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.555378 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.555557 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.055528245 +0000 UTC m=+150.070735769 (durationBeforeRetry 500ms). 
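Each failed volume operation above is requeued by nestedpendingoperations with a delay (the "No retries permitted until ... (durationBeforeRetry 500ms)" lines) rather than retried inline, which is why the same pair of errors recurs every few hundred milliseconds until the driver registers. The same retry-with-backoff shape, sketched with apimachinery's wait package; the parameters are illustrative, not the kubelet's actual tuning:

```go
// Retry a condition with exponential backoff, in the spirit of the volume
// reconciler's requeue-after-delay behavior seen in the log.
package main

import (
	"fmt"
	"time"

	"k8s.io/apimachinery/pkg/util/wait"
)

func main() {
	attempts := 0
	err := wait.ExponentialBackoff(wait.Backoff{
		Duration: 500 * time.Millisecond, // matches durationBeforeRetry 500ms above
		Factor:   2.0,
		Steps:    5,
	}, func() (bool, error) {
		attempts++
		fmt.Printf("attempt %d: driver not registered yet\n", attempts)
		return false, nil // condition not met; retry after the backoff delay
	})
	fmt.Println(attempts, err) // non-nil err once Steps are exhausted
}
```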
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.555743 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.556154 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.056146692 +0000 UTC m=+150.071354226 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.626881 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:42 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:42 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:42 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.626954 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.657936 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.658163 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.158130235 +0000 UTC m=+150.173337789 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.658263 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.658683 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.158663569 +0000 UTC m=+150.173871133 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.759734 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.759899 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.259865261 +0000 UTC m=+150.275072815 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.760339 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.760707 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.260695273 +0000 UTC m=+150.275902867 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.821425 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" event={"ID":"45adf198-876a-4b6e-b75e-348e84079e86","Type":"ContainerStarted","Data":"8e9146b70a1bdeac484d8953ef7b68e183619f3905cbf2679f994e277d1b237a"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.825588 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" event={"ID":"31057271-37ac-4277-9ab0-3ef846dfb6dd","Type":"ContainerStarted","Data":"f7a6b3e3ef90bb5edff2ef989c11fb4d4b97cb13245d04063e7a9f279e63634d"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.836046 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" event={"ID":"4030ecf3-bdf8-48ae-b509-bb7eace24ff9","Type":"ContainerStarted","Data":"78d1765e7fd723085fd688ff3f908b74a84b1dd9315de4a5f152772620cf347d"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.841753 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ptg9n" event={"ID":"b9431171-295f-4b2f-9a03-3325d993850c","Type":"ContainerStarted","Data":"352afb5c4d541ac475ecfe0f90ed2a918ca4841bdc67b313b5d6c5040f98f2d8"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.841868 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-dns/dns-default-ptg9n" event={"ID":"b9431171-295f-4b2f-9a03-3325d993850c","Type":"ContainerStarted","Data":"ebf74f1b31f2bdb720088e2a688994b890d1208aa3408c74371b21a995c72f00"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.841981 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-dns/dns-default-ptg9n"
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.844985 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" event={"ID":"cd14d895-97b2-4840-aa5b-ed942b6a89ce","Type":"ContainerStarted","Data":"a85a7fb8176d94b7bdb7790849b7a47b964c48ea960ab19691d11631c4c3edc1"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.847525 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" event={"ID":"ffa77803-cf30-4031-a0b2-c6ad1a66ffd7","Type":"ContainerStarted","Data":"5999866db371597d9f4e49f372b62f9624c5a14c0f71a124f93067374b20af2a"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.849350 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" event={"ID":"30b86d7c-a747-40c2-b76f-76eec0f9439c","Type":"ContainerStarted","Data":"73d2fb50f15282973715167013ea0a9a05df7ae6cd93bc73f3f7b22c151180a7"}
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.850568 4703 patch_prober.go:28] interesting pod/packageserver-d55dfcdfc-56wp2 container/packageserver namespace/openshift-operator-lifecycle-manager: Readiness probe status=failure output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused" start-of-body=
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.850623 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" podUID="590dfde0-b415-4fb3-85c5-b8d5376617c8" containerName="packageserver" probeResult="failure" output="Get \"https://10.217.0.39:5443/healthz\": dial tcp 10.217.0.39:5443: connect: connection refused"
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.867188 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.867389 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.367343682 +0000 UTC m=+150.382551216 (durationBeforeRetry 500ms).
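The packageserver and catalog-operator readiness failures above ("connect: connection refused") are the expected transient state between ContainerStarted and the process binding its HTTPS port. A probe of roughly that shape, sketched with the k8s.io/api types; the period and threshold are assumptions, since this log does not record them:

```go
// Build an HTTPS readiness probe like the one failing against :5443 above.
package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	p := corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Path:   "/healthz",
				Port:   intstr.FromInt(5443),
				Scheme: corev1.URISchemeHTTPS,
			},
		},
		PeriodSeconds:    10, // illustrative values, not from this log
		FailureThreshold: 3,
	}
	fmt.Printf("%+v\n", p)
}
```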
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.867935 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.887438 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.387401991 +0000 UTC m=+150.402609525 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.903293 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/catalog-operator-68c6474976-p26zs"
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.933658 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns-operator/dns-operator-744455d44c-9jft7" podStartSLOduration=127.933642585 podStartE2EDuration="2m7.933642585s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:42.887903755 +0000 UTC m=+149.903111299" watchObservedRunningTime="2026-02-02 12:53:42.933642585 +0000 UTC m=+149.948850119"
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.936222 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ingress-operator/ingress-operator-5b745b69d9-czmb9" podStartSLOduration=127.936208134 podStartE2EDuration="2m7.936208134s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:42.931748064 +0000 UTC m=+149.946955618" watchObservedRunningTime="2026-02-02 12:53:42.936208134 +0000 UTC m=+149.951415668"
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.971879 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/multus-admission-controller-857f4d67dd-q4xdj" podStartSLOduration=126.971852483 podStartE2EDuration="2m6.971852483s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:42.970094495 +0000 UTC m=+149.985302049" watchObservedRunningTime="2026-02-02 12:53:42.971852483 +0000 UTC m=+149.987060007"
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.974614 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") "
Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.975017 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.474993157 +0000 UTC m=+150.490200691 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers
Feb 02 12:53:42 crc kubenswrapper[4703]: I0202 12:53:42.975091 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2"
Feb 02 12:53:42 crc kubenswrapper[4703]: E0202 12:53:42.975735 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.475724627 +0000 UTC m=+150.490932161 (durationBeforeRetry 500ms).
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.003660 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager-operator/kube-controller-manager-operator-78b949d7b-9vl8k" podStartSLOduration=128.003598377 podStartE2EDuration="2m8.003598377s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:43.000460122 +0000 UTC m=+150.015667656" watchObservedRunningTime="2026-02-02 12:53:43.003598377 +0000 UTC m=+150.018805911" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.077078 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.077591 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.577559506 +0000 UTC m=+150.592767040 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.172713 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-scheduler-operator/openshift-kube-scheduler-operator-5fdd9b5758-2g7gn" podStartSLOduration=128.172685154 podStartE2EDuration="2m8.172685154s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:43.170623579 +0000 UTC m=+150.185831113" watchObservedRunningTime="2026-02-02 12:53:43.172685154 +0000 UTC m=+150.187892688" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.179158 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.179577 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.679558809 +0000 UTC m=+150.694766343 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.233644 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-dns/dns-default-ptg9n" podStartSLOduration=10.233626233 podStartE2EDuration="10.233626233s" podCreationTimestamp="2026-02-02 12:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:43.231684841 +0000 UTC m=+150.246892375" watchObservedRunningTime="2026-02-02 12:53:43.233626233 +0000 UTC m=+150.248833767" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.280849 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.280992 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.780965936 +0000 UTC m=+150.796173470 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.281241 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.281643 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.781633534 +0000 UTC m=+150.796841078 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.319827 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-machine-config-operator/machine-config-operator-74547568cd-vn68g" podStartSLOduration=127.319809381 podStartE2EDuration="2m7.319809381s" podCreationTimestamp="2026-02-02 12:51:36 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:43.275997373 +0000 UTC m=+150.291204927" watchObservedRunningTime="2026-02-02 12:53:43.319809381 +0000 UTC m=+150.335016915" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.384769 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.385220 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.88519818 +0000 UTC m=+150.900405714 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.488988 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.489385 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:43.989367412 +0000 UTC m=+151.004574946 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.595879 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.596374 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.096355989 +0000 UTC m=+151.111563523 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.633213 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:43 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:43 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:43 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.633875 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.697488 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.697790 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.197778167 +0000 UTC m=+151.212985691 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.798738 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.799113 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.299097362 +0000 UTC m=+151.314304886 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.873811 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" event={"ID":"45adf198-876a-4b6e-b75e-348e84079e86","Type":"ContainerStarted","Data":"f275d6cdca647ebf35700a5f99d8d20046bc3ae469599821b06d8001fd4b32bf"} Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.883355 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" event={"ID":"7f576bdf-9ebc-46a8-8079-cb105274bba2","Type":"ContainerStarted","Data":"884698df962bfd6194a73c8d5e1259833a879e2b4ac0cd11c09c5064e36683cb"} Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.900567 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.900641 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.900684 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: 
\"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.900712 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.900742 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:43 crc kubenswrapper[4703]: E0202 12:53:43.901123 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.401102236 +0000 UTC m=+151.416309820 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.910307 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"nginx-conf\" (UniqueName: \"kubernetes.io/configmap/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-nginx-conf\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.911924 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"networking-console-plugin-cert\" (UniqueName: \"kubernetes.io/secret/5fe485a1-e14f-4c09-b5b9-f252bc42b7e8-networking-console-plugin-cert\") pod \"networking-console-plugin-85b44fc459-gdk6g\" (UID: \"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8\") " pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.912472 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cqllr\" (UniqueName: \"kubernetes.io/projected/3b6479f0-333b-4a96-9adf-2099afdc2447-kube-api-access-cqllr\") pod \"network-check-target-xd92c\" (UID: \"3b6479f0-333b-4a96-9adf-2099afdc2447\") " pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:43 crc kubenswrapper[4703]: I0202 12:53:43.915863 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s2dwl\" (UniqueName: \"kubernetes.io/projected/9d751cbb-f2e2-430d-9754-c882a5e924a5-kube-api-access-s2dwl\") pod \"network-check-source-55646444c4-trplf\" (UID: \"9d751cbb-f2e2-430d-9754-c882a5e924a5\") " pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:44 crc 
kubenswrapper[4703]: I0202 12:53:44.002015 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.003387 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.503370756 +0000 UTC m=+151.518578280 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.046134 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.107238 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.107689 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.607666801 +0000 UTC m=+151.622874335 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.155415 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.162846 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.210937 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.211335 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.711314509 +0000 UTC m=+151.726522053 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.312164 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.312550 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.812534962 +0000 UTC m=+151.827742496 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.413366 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.413748 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:44.913728043 +0000 UTC m=+151.928935587 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.516431 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.517404 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.017384591 +0000 UTC m=+152.032592125 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.620051 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.620091 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.120074012 +0000 UTC m=+152.135281546 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.620521 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.620991 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.120978067 +0000 UTC m=+152.136185601 (durationBeforeRetry 500ms). Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.645890 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:44 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:44 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:44 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.645943 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.722825 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.723227 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.223211326 +0000 UTC m=+152.238418850 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.802558 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-zf6z4"] Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.804011 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.814635 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.823839 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zf6z4"] Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.825096 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.825518 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.325503848 +0000 UTC m=+152.340711392 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.911828 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"d9b7920ef0a6dcdfaf4ed5939d3687cccc8fbb39d28c857f56c5244cf6154e82"} Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.914117 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" event={"ID":"7f576bdf-9ebc-46a8-8079-cb105274bba2","Type":"ContainerStarted","Data":"959b9246a3735abd15d2a12e563c86668a23bcf4581879e8b57f1e6954c47a70"} Feb 02 12:53:44 crc kubenswrapper[4703]: W0202 12:53:44.949932 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod9d751cbb_f2e2_430d_9754_c882a5e924a5.slice/crio-b3c7ce438377257d9d34781d3c9e6aeb3a3018ddacf7236379ba4c6d9e2f993b WatchSource:0}: Error finding container b3c7ce438377257d9d34781d3c9e6aeb3a3018ddacf7236379ba4c6d9e2f993b: Status 404 returned error can't find the container with id b3c7ce438377257d9d34781d3c9e6aeb3a3018ddacf7236379ba4c6d9e2f993b Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.951126 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" event={"ID":"45adf198-876a-4b6e-b75e-348e84079e86","Type":"ContainerStarted","Data":"b2224286bbb8ff9bc3b183eba52a6a502ad867806f65a0456a4b9bd9df1e591b"} Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.955393 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.955522 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.455498434 +0000 UTC m=+152.470705978 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.956172 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kb4p8\" (UniqueName: \"kubernetes.io/projected/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-kube-api-access-kb4p8\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.956316 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-catalog-content\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.956544 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-utilities\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.956701 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:44 crc kubenswrapper[4703]: E0202 12:53:44.959121 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.459098311 +0000 UTC m=+152.474305845 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.964608 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" podStartSLOduration=129.964587598 podStartE2EDuration="2m9.964587598s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:44.960413276 +0000 UTC m=+151.975620820" watchObservedRunningTime="2026-02-02 12:53:44.964587598 +0000 UTC m=+151.979795122" Feb 02 12:53:44 crc kubenswrapper[4703]: I0202 12:53:44.982846 4703 plugin_watcher.go:194] "Adding socket path or updating timestamp to desired state cache" path="/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.014957 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-kbjrd"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.016640 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.020223 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.033155 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-kbjrd"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060098 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060330 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-utilities\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060380 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kb4p8\" (UniqueName: \"kubernetes.io/projected/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-kube-api-access-kb4p8\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060406 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-catalog-content\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") 
" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060434 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-catalog-content\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060460 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-b58px\" (UniqueName: \"kubernetes.io/projected/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-kube-api-access-b58px\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060493 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-utilities\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.060971 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-utilities\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: E0202 12:53:45.061062 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.561043163 +0000 UTC m=+152.576250697 (durationBeforeRetry 500ms). 
Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.061632 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-catalog-content\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.086962 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kb4p8\" (UniqueName: \"kubernetes.io/projected/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-kube-api-access-kb4p8\") pod \"community-operators-zf6z4\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.163072 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-catalog-content\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.163119 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-b58px\" (UniqueName: \"kubernetes.io/projected/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-kube-api-access-b58px\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.163161 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.163236 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-utilities\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.164175 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:53:45 crc kubenswrapper[4703]: E0202 12:53:45.164530 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.664514266 +0000 UTC m=+152.679721800 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.164966 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-catalog-content\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.165123 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-utilities\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.198869 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-z28wg"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.200784 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.211130 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-b58px\" (UniqueName: \"kubernetes.io/projected/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-kube-api-access-b58px\") pod \"certified-operators-kbjrd\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.224977 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z28wg"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.235423 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.235480 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.259382 4703 patch_prober.go:28] interesting pod/console-f9d7485db-bb2nk container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.259435 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-bb2nk" podUID="cb600eb2-3a8a-4303-b99b-a6c40cd9149e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.265890 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod 
\"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266178 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vpwgc\" (UniqueName: \"kubernetes.io/projected/869c519e-8d0f-41fb-9f62-ae66f567003f-kube-api-access-vpwgc\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266299 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-utilities\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266400 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-catalog-content\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: E0202 12:53:45.266675 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName:8f668bae-612b-4b75-9490-919e737c6a3b nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.766648523 +0000 UTC m=+152.781856067 (durationBeforeRetry 500ms). Error: UnmountVolume.TearDown failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b") : kubernetes.io/csi: Unmounter.TearDownAt failed to get CSI client: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266772 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266814 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266771 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.266904 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: 
connect: connection refused" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.273287 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.362875 4703 reconciler.go:161] "OperationExecutor.RegisterPlugin started" plugin={"SocketPath":"/var/lib/kubelet/plugins_registry/kubevirt.io.hostpath-provisioner-reg.sock","Timestamp":"2026-02-02T12:53:44.982905081Z","Handler":null,"Name":""} Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.368179 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-catalog-content\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.368254 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.368305 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vpwgc\" (UniqueName: \"kubernetes.io/projected/869c519e-8d0f-41fb-9f62-ae66f567003f-kube-api-access-vpwgc\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.368398 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-utilities\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: E0202 12:53:45.368672 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8 podName: nodeName:}" failed. No retries permitted until 2026-02-02 12:53:45.868657046 +0000 UTC m=+152.883864580 (durationBeforeRetry 500ms). 
Error: MountVolume.MountDevice failed for volume "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "image-registry-697d97f7c8-wrpp2" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c") : kubernetes.io/csi: attacher.MountDevice failed to create newCsiDriverClient: driver name kubevirt.io.hostpath-provisioner not found in the list of registered CSI drivers Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.369235 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-utilities\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.369664 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-catalog-content\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.387488 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-4qklh"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.388661 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.406883 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qklh"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.407407 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vpwgc\" (UniqueName: \"kubernetes.io/projected/869c519e-8d0f-41fb-9f62-ae66f567003f-kube-api-access-vpwgc\") pod \"community-operators-z28wg\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.427297 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.467179 4703 csi_plugin.go:100] kubernetes.io/csi: Trying to validate a new CSI Driver with name: kubevirt.io.hostpath-provisioner endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock versions: 1.0.0 Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.467218 4703 csi_plugin.go:113] kubernetes.io/csi: Register new plugin with name: kubevirt.io.hostpath-provisioner at endpoint: /var/lib/kubelet/plugins/csi-hostpath/csi.sock Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.470745 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"8f668bae-612b-4b75-9490-919e737c6a3b\" (UID: \"8f668bae-612b-4b75-9490-919e737c6a3b\") " Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.471077 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qlwwm\" (UniqueName: \"kubernetes.io/projected/15647656-be1e-49d7-92dd-880ca1fd4d31-kube-api-access-qlwwm\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.471104 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-utilities\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.471182 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-catalog-content\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.475535 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8") pod "8f668bae-612b-4b75-9490-919e737c6a3b" (UID: "8f668bae-612b-4b75-9490-919e737c6a3b"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". 
PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.572741 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qlwwm\" (UniqueName: \"kubernetes.io/projected/15647656-be1e-49d7-92dd-880ca1fd4d31-kube-api-access-qlwwm\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.573114 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-utilities\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.573168 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.573223 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-catalog-content\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.573381 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.574121 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-utilities\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.574358 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-catalog-content\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.590445 4703 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.590486 4703 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/1f4776af88835e41c12b831b4c9fed40233456d14189815a54dbe7f892fc1983/globalmount\"" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.608753 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qlwwm\" (UniqueName: \"kubernetes.io/projected/15647656-be1e-49d7-92dd-880ca1fd4d31-kube-api-access-qlwwm\") pod \"certified-operators-4qklh\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.640777 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:45 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:45 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:45 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.640860 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.649950 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-697d97f7c8-wrpp2\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.696617 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-zf6z4"] Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.703831 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.811063 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.952215 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8f668bae-612b-4b75-9490-919e737c6a3b" path="/var/lib/kubelet/pods/8f668bae-612b-4b75-9490-919e737c6a3b/volumes" Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.986121 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 12:53:45 crc kubenswrapper[4703]: I0202 12:53:45.986180 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:45.992430 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"36bf5fe0e3600666459117015a3590b9c9277f98b3669761ee32ea4e1c4f5b75"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:45.992492 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-source-55646444c4-trplf" event={"ID":"9d751cbb-f2e2-430d-9754-c882a5e924a5","Type":"ContainerStarted","Data":"b3c7ce438377257d9d34781d3c9e6aeb3a3018ddacf7236379ba4c6d9e2f993b"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:45.999607 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerStarted","Data":"bd9a6cc9cc35dbda142633aa3f407634746cfd2d3ad895007b8457a86527b35a"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.015549 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" event={"ID":"45adf198-876a-4b6e-b75e-348e84079e86","Type":"ContainerStarted","Data":"e18793bd3df739ac51fdc93bc914d2e49480669866faa9f2f7134af49b9b0263"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.021406 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"5e06034a3fb0a6e6f7409203e94666f28a8998ee61411681cbfb8b469243498e"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.021490 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-console/networking-console-plugin-85b44fc459-gdk6g" event={"ID":"5fe485a1-e14f-4c09-b5b9-f252bc42b7e8","Type":"ContainerStarted","Data":"6868de5df7f32b43098fd0aa665f4b7bdc44e8f4d131cfaa2b54631adb66b2c6"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.032982 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-network-diagnostics/network-check-target-xd92c" event={"ID":"3b6479f0-333b-4a96-9adf-2099afdc2447","Type":"ContainerStarted","Data":"bbd0ad95567be212e26c80a28a5614df0728d3182615c040bac5cb3ba7e35e1e"} Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.033023 4703 kubelet.go:2428] "SyncLoop UPDATE" 
source="api" pods=["openshift-marketplace/certified-operators-kbjrd"] Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.033086 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.057064 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="hostpath-provisioner/csi-hostpathplugin-sbswk" podStartSLOduration=13.057048021 podStartE2EDuration="13.057048021s" podCreationTimestamp="2026-02-02 12:53:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:46.050719981 +0000 UTC m=+153.065927535" watchObservedRunningTime="2026-02-02 12:53:46.057048021 +0000 UTC m=+153.072255555" Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.082611 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-z28wg"] Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.391372 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-4qklh"] Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.474537 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wrpp2"] Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.610184 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.619458 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ingress/router-default-5444994796-9rxcr" Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.638321 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:46 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:46 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:46 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.638680 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.998558 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-7c4qb"] Feb 02 12:53:46 crc kubenswrapper[4703]: I0202 12:53:46.999683 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.002496 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.011335 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.011443 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.015647 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/packageserver-d55dfcdfc-56wp2" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.026057 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-catalog-content\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.026148 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-utilities\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.026211 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8t5z5\" (UniqueName: \"kubernetes.io/projected/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-kube-api-access-8t5z5\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.034065 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7c4qb"] Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.054178 4703 generic.go:334] "Generic (PLEG): container finished" podID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerID="41350c444ae8c56273d87552085cf1ee20135ae2c2fd03466d7543796c2f8ab5" exitCode=0 Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.068346 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerDied","Data":"41350c444ae8c56273d87552085cf1ee20135ae2c2fd03466d7543796c2f8ab5"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.070910 4703 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.075219 4703 generic.go:334] "Generic (PLEG): container finished" podID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerID="3c6bad9340025e13170315751ebb42512380a1f6ca2b47ab1f45316dae5ad4ee" exitCode=0 Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.075401 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" 
event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerDied","Data":"3c6bad9340025e13170315751ebb42512380a1f6ca2b47ab1f45316dae5ad4ee"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.075492 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerStarted","Data":"1462caadd7b21ee3b077c72dc6d2e86ec0c5921a7a9b81c387952d9ac4abbc91"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.088002 4703 generic.go:334] "Generic (PLEG): container finished" podID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerID="a260c39214e69c38c4521a147e4262d8e3738fbf85c1396a7975290ae5e5c7db" exitCode=0 Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.088073 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerDied","Data":"a260c39214e69c38c4521a147e4262d8e3738fbf85c1396a7975290ae5e5c7db"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.088099 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerStarted","Data":"d9158735b42076e56358baec39dac37c64d64b5f0dbc40222014a3b76857c4b1"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.098756 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" event={"ID":"5654372a-377f-47b2-a476-6f1a55395e6c","Type":"ContainerStarted","Data":"b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.105169 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" event={"ID":"5654372a-377f-47b2-a476-6f1a55395e6c","Type":"ContainerStarted","Data":"b5a45dbf12f7fc27e4b85090f5118017fae787bd11378eea4da1eb8b68b8f670"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.105695 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.125991 4703 generic.go:334] "Generic (PLEG): container finished" podID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerID="e3c8267aa3187ecdd304b8aa7bc02c78b43ace432b331064806e8286e7c747ed" exitCode=0 Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.126070 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerDied","Data":"e3c8267aa3187ecdd304b8aa7bc02c78b43ace432b331064806e8286e7c747ed"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.126103 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerStarted","Data":"cfa3cedc876fb78ba9c12c247b8e1bdc11826b9f9c00cfc2f339172fb4cd6b57"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.129317 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-catalog-content\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc 
kubenswrapper[4703]: I0202 12:53:47.129417 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-utilities\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.129537 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8t5z5\" (UniqueName: \"kubernetes.io/projected/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-kube-api-access-8t5z5\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.130094 4703 generic.go:334] "Generic (PLEG): container finished" podID="afc660f4-96eb-4013-b703-0967895a611b" containerID="a0a72e0182b9a249207dc0121dc975f347391e823f261fa2c90532227bef7d1a" exitCode=0 Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.130542 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" event={"ID":"afc660f4-96eb-4013-b703-0967895a611b","Type":"ContainerDied","Data":"a0a72e0182b9a249207dc0121dc975f347391e823f261fa2c90532227bef7d1a"} Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.131254 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-utilities\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.131731 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-catalog-content\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.170600 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8t5z5\" (UniqueName: \"kubernetes.io/projected/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-kube-api-access-8t5z5\") pod \"redhat-marketplace-7c4qb\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.245198 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" podStartSLOduration=132.245182697 podStartE2EDuration="2m12.245182697s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:47.242934346 +0000 UTC m=+154.258141880" watchObservedRunningTime="2026-02-02 12:53:47.245182697 +0000 UTC m=+154.260390231" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.322978 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.391320 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-wn5cj"] Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.392981 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.406221 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn5cj"] Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.433626 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-utilities\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.433872 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-854hb\" (UniqueName: \"kubernetes.io/projected/c6f1054c-67cd-46bb-b781-62716d75231a-kube-api-access-854hb\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.434000 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-catalog-content\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.534991 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-854hb\" (UniqueName: \"kubernetes.io/projected/c6f1054c-67cd-46bb-b781-62716d75231a-kube-api-access-854hb\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.535062 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-catalog-content\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.535121 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-utilities\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.535697 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-utilities\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.536241 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-catalog-content\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.573020 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-854hb\" (UniqueName: \"kubernetes.io/projected/c6f1054c-67cd-46bb-b781-62716d75231a-kube-api-access-854hb\") pod \"redhat-marketplace-wn5cj\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.629245 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:47 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:47 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:47 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.629723 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.734665 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.759976 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-7c4qb"] Feb 02 12:53:47 crc kubenswrapper[4703]: I0202 12:53:47.912381 4703 patch_prober.go:28] interesting pod/apiserver-76f77b778f-lbjjm container/openshift-apiserver namespace/openshift-apiserver: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[+]ping ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]log ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]etcd ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/start-apiserver-admission-initializer ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/generic-apiserver-start-informers ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/max-in-flight-filter ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/storage-object-count-tracker-hook ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/image.openshift.io-apiserver-caches ok Feb 02 12:53:47 crc kubenswrapper[4703]: [-]poststarthook/authorization.openshift.io-bootstrapclusterroles failed: reason withheld Feb 02 12:53:47 crc kubenswrapper[4703]: [-]poststarthook/authorization.openshift.io-ensurenodebootstrap-sa failed: reason withheld Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/project.openshift.io-projectcache ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/project.openshift.io-projectauthorizationcache ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/openshift.io-startinformers ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/openshift.io-restmapperupdater ok Feb 02 12:53:47 crc kubenswrapper[4703]: [+]poststarthook/quota.openshift.io-clusterquotamapping ok Feb 02 12:53:47 crc kubenswrapper[4703]: livez check failed Feb 02 12:53:47 crc 
kubenswrapper[4703]: I0202 12:53:47.912525 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" podUID="7f576bdf-9ebc-46a8-8079-cb105274bba2" containerName="openshift-apiserver" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:47.996030 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.000389 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.015341 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-rzrkb"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.022286 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.022414 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.023158 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager"/"kube-root-ca.crt" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.026894 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.027246 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager"/"installer-sa-dockercfg-kjl2n" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.043838 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rzrkb"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.044212 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-catalog-content\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.044308 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6e53b52-19fe-43a8-99c4-20424253942d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.044373 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zwnl7\" (UniqueName: \"kubernetes.io/projected/0331857a-b571-4128-9927-b2cc5dd58969-kube-api-access-zwnl7\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.044421 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-utilities\") pod \"redhat-operators-rzrkb\" (UID: 
\"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.044445 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6e53b52-19fe-43a8-99c4-20424253942d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.147758 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zwnl7\" (UniqueName: \"kubernetes.io/projected/0331857a-b571-4128-9927-b2cc5dd58969-kube-api-access-zwnl7\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.148552 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-utilities\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.148602 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6e53b52-19fe-43a8-99c4-20424253942d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.148665 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-catalog-content\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.148751 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6e53b52-19fe-43a8-99c4-20424253942d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.149521 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6e53b52-19fe-43a8-99c4-20424253942d-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.150885 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-catalog-content\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.160159 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7c4qb" 
event={"ID":"7b2bcd29-093a-439f-b3ed-e2aef1ae6904","Type":"ContainerStarted","Data":"be79909a446c6cbd804d164fc3928c82e01dab48fbb404bd2fa7bd31fceee2e3"} Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.164706 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-utilities\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.194837 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zwnl7\" (UniqueName: \"kubernetes.io/projected/0331857a-b571-4128-9927-b2cc5dd58969-kube-api-access-zwnl7\") pod \"redhat-operators-rzrkb\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.200377 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6e53b52-19fe-43a8-99c4-20424253942d-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.252673 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn5cj"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.358732 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.363609 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.399419 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-jwrr8"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.403093 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.423833 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jwrr8"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.567858 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-utilities\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.568585 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-tnprx\" (UniqueName: \"kubernetes.io/projected/6a692829-23d1-4ed4-954d-33ded09c5a25-kube-api-access-tnprx\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.568715 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-catalog-content\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.634040 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:48 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:48 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:48 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.634110 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.670520 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-catalog-content\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.670586 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-utilities\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.670622 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-tnprx\" (UniqueName: \"kubernetes.io/projected/6a692829-23d1-4ed4-954d-33ded09c5a25-kube-api-access-tnprx\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.671444 4703 
util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.671519 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-catalog-content\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.671530 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-utilities\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.703658 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-tnprx\" (UniqueName: \"kubernetes.io/projected/6a692829-23d1-4ed4-954d-33ded09c5a25-kube-api-access-tnprx\") pod \"redhat-operators-jwrr8\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.756474 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.772112 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc660f4-96eb-4013-b703-0967895a611b-config-volume\") pod \"afc660f4-96eb-4013-b703-0967895a611b\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.772236 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc660f4-96eb-4013-b703-0967895a611b-secret-volume\") pod \"afc660f4-96eb-4013-b703-0967895a611b\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.775469 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/afc660f4-96eb-4013-b703-0967895a611b-config-volume" (OuterVolumeSpecName: "config-volume") pod "afc660f4-96eb-4013-b703-0967895a611b" (UID: "afc660f4-96eb-4013-b703-0967895a611b"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.784988 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/afc660f4-96eb-4013-b703-0967895a611b-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "afc660f4-96eb-4013-b703-0967895a611b" (UID: "afc660f4-96eb-4013-b703-0967895a611b"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.831560 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 12:53:48 crc kubenswrapper[4703]: E0202 12:53:48.832327 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="afc660f4-96eb-4013-b703-0967895a611b" containerName="collect-profiles" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.832343 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="afc660f4-96eb-4013-b703-0967895a611b" containerName="collect-profiles" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.832492 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="afc660f4-96eb-4013-b703-0967895a611b" containerName="collect-profiles" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.833149 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.840548 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.841262 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.843965 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.874064 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xz4ld\" (UniqueName: \"kubernetes.io/projected/afc660f4-96eb-4013-b703-0967895a611b-kube-api-access-xz4ld\") pod \"afc660f4-96eb-4013-b703-0967895a611b\" (UID: \"afc660f4-96eb-4013-b703-0967895a611b\") " Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.874501 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa448388-6971-479d-a86e-1429a06f8f4a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.874648 4703 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/afc660f4-96eb-4013-b703-0967895a611b-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.874732 4703 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/afc660f4-96eb-4013-b703-0967895a611b-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.887765 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/afc660f4-96eb-4013-b703-0967895a611b-kube-api-access-xz4ld" (OuterVolumeSpecName: "kube-api-access-xz4ld") pod "afc660f4-96eb-4013-b703-0967895a611b" (UID: "afc660f4-96eb-4013-b703-0967895a611b"). InnerVolumeSpecName "kube-api-access-xz4ld". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.975974 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa448388-6971-479d-a86e-1429a06f8f4a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.976028 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa448388-6971-479d-a86e-1429a06f8f4a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.976106 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xz4ld\" (UniqueName: \"kubernetes.io/projected/afc660f4-96eb-4013-b703-0967895a611b-kube-api-access-xz4ld\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.976154 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa448388-6971-479d-a86e-1429a06f8f4a-kubelet-dir\") pod \"revision-pruner-8-crc\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:48 crc kubenswrapper[4703]: I0202 12:53:48.979768 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-controller-manager/revision-pruner-9-crc"] Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.047653 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-rzrkb"] Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.080298 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa448388-6971-479d-a86e-1429a06f8f4a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.108110 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa448388-6971-479d-a86e-1429a06f8f4a-kube-api-access\") pod \"revision-pruner-8-crc\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:49 crc kubenswrapper[4703]: W0202 12:53:49.116083 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0331857a_b571_4128_9927_b2cc5dd58969.slice/crio-857983b80f77874fe31978f30b88c7736aaf8e0dfbf482093daa28964b60197f WatchSource:0}: Error finding container 857983b80f77874fe31978f30b88c7736aaf8e0dfbf482093daa28964b60197f: Status 404 returned error can't find the container with id 857983b80f77874fe31978f30b88c7736aaf8e0dfbf482093daa28964b60197f Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.177081 4703 generic.go:334] "Generic (PLEG): container finished" podID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerID="22062f341ab3d02fea37e8eba60cc9190d9f3513b44ce3c74e499cfe29bbf758" exitCode=0 Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.177154 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-marketplace-7c4qb" event={"ID":"7b2bcd29-093a-439f-b3ed-e2aef1ae6904","Type":"ContainerDied","Data":"22062f341ab3d02fea37e8eba60cc9190d9f3513b44ce3c74e499cfe29bbf758"} Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.180257 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" event={"ID":"afc660f4-96eb-4013-b703-0967895a611b","Type":"ContainerDied","Data":"9e822584faa754b4b209409c711249e85bd5561afb3a1c1882291e8df77859a9"} Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.180316 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="9e822584faa754b4b209409c711249e85bd5561afb3a1c1882291e8df77859a9" Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.180473 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500605-9spmm" Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.181813 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.188781 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerStarted","Data":"857983b80f77874fe31978f30b88c7736aaf8e0dfbf482093daa28964b60197f"} Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.190189 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-jwrr8"] Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.192478 4703 generic.go:334] "Generic (PLEG): container finished" podID="c6f1054c-67cd-46bb-b781-62716d75231a" containerID="434ffea7da1ba510463a499d39e93f97217a52714ef42b1581895b3830126f44" exitCode=0 Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.192548 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerDied","Data":"434ffea7da1ba510463a499d39e93f97217a52714ef42b1581895b3830126f44"} Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.192616 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerStarted","Data":"22d9322fd663fc4618c399222addb1c6ad30e3644f0047778c267dd8bc2e86b2"} Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.217403 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a6e53b52-19fe-43a8-99c4-20424253942d","Type":"ContainerStarted","Data":"21fbb7743e5a1330c1e933e410f6c0595f35e40f48f5bba74131733f997400c0"} Feb 02 12:53:49 crc kubenswrapper[4703]: W0202 12:53:49.269334 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod6a692829_23d1_4ed4_954d_33ded09c5a25.slice/crio-bde41cf39020cd08f1aeadf7d76940e006cf291a017f9dfa6ce0dcdb371cd852 WatchSource:0}: Error finding container bde41cf39020cd08f1aeadf7d76940e006cf291a017f9dfa6ce0dcdb371cd852: Status 404 returned error can't find the container with id bde41cf39020cd08f1aeadf7d76940e006cf291a017f9dfa6ce0dcdb371cd852 Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.622334 4703 patch_prober.go:28] interesting 
pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:49 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:49 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:49 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.623010 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:49 crc kubenswrapper[4703]: I0202 12:53:49.640818 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-8-crc"] Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.230736 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"aa448388-6971-479d-a86e-1429a06f8f4a","Type":"ContainerStarted","Data":"51e5689a61c732fdad770208292500f2ef667f1983d3c17cc23a1946c9425c0a"} Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.237047 4703 generic.go:334] "Generic (PLEG): container finished" podID="0331857a-b571-4128-9927-b2cc5dd58969" containerID="a77513de6f0577a48d44d7d0636cf7b9bc936073798792d2e2886ccceaab1ee3" exitCode=0 Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.237187 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerDied","Data":"a77513de6f0577a48d44d7d0636cf7b9bc936073798792d2e2886ccceaab1ee3"} Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.240366 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a6e53b52-19fe-43a8-99c4-20424253942d","Type":"ContainerStarted","Data":"ed53fc4d20c2fbe978d1ace374646ec369d026fddb950b556144e6aed79b8f41"} Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.255343 4703 generic.go:334] "Generic (PLEG): container finished" podID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerID="3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819" exitCode=0 Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.255413 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerDied","Data":"3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819"} Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.255450 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerStarted","Data":"bde41cf39020cd08f1aeadf7d76940e006cf291a017f9dfa6ce0dcdb371cd852"} Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.300618 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-controller-manager/revision-pruner-9-crc" podStartSLOduration=3.300598513 podStartE2EDuration="3.300598513s" podCreationTimestamp="2026-02-02 12:53:47 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:50.275892388 +0000 UTC m=+157.291099922" 
watchObservedRunningTime="2026-02-02 12:53:50.300598513 +0000 UTC m=+157.315806037" Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.625439 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:50 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:50 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:50 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:50 crc kubenswrapper[4703]: I0202 12:53:50.625971 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:51 crc kubenswrapper[4703]: I0202 12:53:51.280974 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"aa448388-6971-479d-a86e-1429a06f8f4a","Type":"ContainerStarted","Data":"0a54ab3f0e0c474661207d9dd0381f73d6dfc1852feccdf1dfaa692aef76272e"} Feb 02 12:53:51 crc kubenswrapper[4703]: I0202 12:53:51.298348 4703 generic.go:334] "Generic (PLEG): container finished" podID="a6e53b52-19fe-43a8-99c4-20424253942d" containerID="ed53fc4d20c2fbe978d1ace374646ec369d026fddb950b556144e6aed79b8f41" exitCode=0 Feb 02 12:53:51 crc kubenswrapper[4703]: I0202 12:53:51.298406 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a6e53b52-19fe-43a8-99c4-20424253942d","Type":"ContainerDied","Data":"ed53fc4d20c2fbe978d1ace374646ec369d026fddb950b556144e6aed79b8f41"} Feb 02 12:53:51 crc kubenswrapper[4703]: I0202 12:53:51.302903 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/revision-pruner-8-crc" podStartSLOduration=3.302885351 podStartE2EDuration="3.302885351s" podCreationTimestamp="2026-02-02 12:53:48 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:53:51.29990557 +0000 UTC m=+158.315113114" watchObservedRunningTime="2026-02-02 12:53:51.302885351 +0000 UTC m=+158.318092875" Feb 02 12:53:51 crc kubenswrapper[4703]: I0202 12:53:51.650388 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:51 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:51 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:51 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:51 crc kubenswrapper[4703]: I0202 12:53:51.650456 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.015108 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.024640 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-apiserver/apiserver-76f77b778f-lbjjm" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.033856 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-dns/dns-default-ptg9n" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.324063 4703 generic.go:334] "Generic (PLEG): container finished" podID="aa448388-6971-479d-a86e-1429a06f8f4a" containerID="0a54ab3f0e0c474661207d9dd0381f73d6dfc1852feccdf1dfaa692aef76272e" exitCode=0 Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.324123 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"aa448388-6971-479d-a86e-1429a06f8f4a","Type":"ContainerDied","Data":"0a54ab3f0e0c474661207d9dd0381f73d6dfc1852feccdf1dfaa692aef76272e"} Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.620892 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:52 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:52 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:52 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.621253 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.742014 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.883021 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6e53b52-19fe-43a8-99c4-20424253942d-kubelet-dir\") pod \"a6e53b52-19fe-43a8-99c4-20424253942d\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.883175 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6e53b52-19fe-43a8-99c4-20424253942d-kube-api-access\") pod \"a6e53b52-19fe-43a8-99c4-20424253942d\" (UID: \"a6e53b52-19fe-43a8-99c4-20424253942d\") " Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.883508 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a6e53b52-19fe-43a8-99c4-20424253942d-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "a6e53b52-19fe-43a8-99c4-20424253942d" (UID: "a6e53b52-19fe-43a8-99c4-20424253942d"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.884700 4703 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/a6e53b52-19fe-43a8-99c4-20424253942d-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.899181 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a6e53b52-19fe-43a8-99c4-20424253942d-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "a6e53b52-19fe-43a8-99c4-20424253942d" (UID: "a6e53b52-19fe-43a8-99c4-20424253942d"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:53:52 crc kubenswrapper[4703]: I0202 12:53:52.985973 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/a6e53b52-19fe-43a8-99c4-20424253942d-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:53 crc kubenswrapper[4703]: I0202 12:53:53.480809 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/revision-pruner-9-crc" event={"ID":"a6e53b52-19fe-43a8-99c4-20424253942d","Type":"ContainerDied","Data":"21fbb7743e5a1330c1e933e410f6c0595f35e40f48f5bba74131733f997400c0"} Feb 02 12:53:53 crc kubenswrapper[4703]: I0202 12:53:53.481160 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="21fbb7743e5a1330c1e933e410f6c0595f35e40f48f5bba74131733f997400c0" Feb 02 12:53:53 crc kubenswrapper[4703]: I0202 12:53:53.480880 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-controller-manager/revision-pruner-9-crc" Feb 02 12:53:53 crc kubenswrapper[4703]: I0202 12:53:53.620931 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:53 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:53 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:53 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:53 crc kubenswrapper[4703]: I0202 12:53:53.620983 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.190527 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.316447 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa448388-6971-479d-a86e-1429a06f8f4a-kubelet-dir\") pod \"aa448388-6971-479d-a86e-1429a06f8f4a\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.316541 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa448388-6971-479d-a86e-1429a06f8f4a-kube-api-access\") pod \"aa448388-6971-479d-a86e-1429a06f8f4a\" (UID: \"aa448388-6971-479d-a86e-1429a06f8f4a\") " Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.316864 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/aa448388-6971-479d-a86e-1429a06f8f4a-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "aa448388-6971-479d-a86e-1429a06f8f4a" (UID: "aa448388-6971-479d-a86e-1429a06f8f4a"). InnerVolumeSpecName "kubelet-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.322144 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa448388-6971-479d-a86e-1429a06f8f4a-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "aa448388-6971-479d-a86e-1429a06f8f4a" (UID: "aa448388-6971-479d-a86e-1429a06f8f4a"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.418781 4703 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/aa448388-6971-479d-a86e-1429a06f8f4a-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.418821 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/aa448388-6971-479d-a86e-1429a06f8f4a-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.519162 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-8-crc" event={"ID":"aa448388-6971-479d-a86e-1429a06f8f4a","Type":"ContainerDied","Data":"51e5689a61c732fdad770208292500f2ef667f1983d3c17cc23a1946c9425c0a"} Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.519210 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="51e5689a61c732fdad770208292500f2ef667f1983d3c17cc23a1946c9425c0a" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.519305 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-8-crc" Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.619039 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:54 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:54 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:54 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:54 crc kubenswrapper[4703]: I0202 12:53:54.619502 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500" Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.231528 4703 patch_prober.go:28] interesting pod/console-f9d7485db-bb2nk container/console namespace/openshift-console: Startup probe status=failure output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" start-of-body= Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.231595 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-console/console-f9d7485db-bb2nk" podUID="cb600eb2-3a8a-4303-b99b-a6c40cd9149e" containerName="console" probeResult="failure" output="Get \"https://10.217.0.21:8443/health\": dial tcp 10.217.0.21:8443: connect: connection refused" Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.263211 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.263296 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.263225 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.263472 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 12:53:55.633057 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld Feb 02 12:53:55 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld Feb 02 12:53:55 crc kubenswrapper[4703]: [+]process-running ok Feb 02 12:53:55 crc kubenswrapper[4703]: healthz check failed Feb 02 12:53:55 crc kubenswrapper[4703]: I0202 
Feb 02 12:53:56 crc kubenswrapper[4703]: I0202 12:53:56.621897 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 12:53:56 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld
Feb 02 12:53:56 crc kubenswrapper[4703]: [+]process-running ok
Feb 02 12:53:56 crc kubenswrapper[4703]: healthz check failed
Feb 02 12:53:56 crc kubenswrapper[4703]: I0202 12:53:56.621952 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 12:53:57 crc kubenswrapper[4703]: I0202 12:53:57.621843 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 12:53:57 crc kubenswrapper[4703]: [-]has-synced failed: reason withheld
Feb 02 12:53:57 crc kubenswrapper[4703]: [+]process-running ok
Feb 02 12:53:57 crc kubenswrapper[4703]: healthz check failed
Feb 02 12:53:57 crc kubenswrapper[4703]: I0202 12:53:57.621918 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 12:53:58 crc kubenswrapper[4703]: I0202 12:53:58.013691 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:53:58 crc kubenswrapper[4703]: I0202 12:53:58.027231 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60-metrics-certs\") pod \"network-metrics-daemon-n2htj\" (UID: \"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60\") " pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:53:58 crc kubenswrapper[4703]: I0202 12:53:58.248669 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-multus/network-metrics-daemon-n2htj"
Feb 02 12:53:58 crc kubenswrapper[4703]: I0202 12:53:58.628629 4703 patch_prober.go:28] interesting pod/router-default-5444994796-9rxcr container/router namespace/openshift-ingress: Startup probe status=failure output="HTTP probe failed with statuscode: 500" start-of-body=[-]backend-http failed: reason withheld
Feb 02 12:53:58 crc kubenswrapper[4703]: [+]has-synced ok
Feb 02 12:53:58 crc kubenswrapper[4703]: [+]process-running ok
Feb 02 12:53:58 crc kubenswrapper[4703]: healthz check failed
Feb 02 12:53:58 crc kubenswrapper[4703]: I0202 12:53:58.630306 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-ingress/router-default-5444994796-9rxcr" podUID="78d19e19-18ac-44d1-ac32-bfe5c2b58f8d" containerName="router" probeResult="failure" output="HTTP probe failed with statuscode: 500"
Feb 02 12:53:59 crc kubenswrapper[4703]: I0202 12:53:59.627016 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-ingress/router-default-5444994796-9rxcr"
Feb 02 12:53:59 crc kubenswrapper[4703]: I0202 12:53:59.632105 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ingress/router-default-5444994796-9rxcr"
Feb 02 12:54:00 crc kubenswrapper[4703]: I0202 12:54:00.165447 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7clns"]
Feb 02 12:54:00 crc kubenswrapper[4703]: I0202 12:54:00.166144 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" containerID="cri-o://7cb1aa05024f34b1618a4afe4a531706354a0af5e8dacb62af4b5cbec1b67e22" gracePeriod=30
Feb 02 12:54:00 crc kubenswrapper[4703]: I0202 12:54:00.180748 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld"]
Feb 02 12:54:00 crc kubenswrapper[4703]: I0202 12:54:00.180992 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerName="route-controller-manager" containerID="cri-o://6541b5cdf5393985cbcb19eeb231e6712edca7bbc1af99ea9cad1777b33f473f" gracePeriod=30
Feb 02 12:54:01 crc kubenswrapper[4703]: I0202 12:54:01.733785 4703 generic.go:334] "Generic (PLEG): container finished" podID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerID="6541b5cdf5393985cbcb19eeb231e6712edca7bbc1af99ea9cad1777b33f473f" exitCode=0
Feb 02 12:54:01 crc kubenswrapper[4703]: I0202 12:54:01.733869 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" event={"ID":"a9a7821e-59f7-414f-bebc-9fab34813bbc","Type":"ContainerDied","Data":"6541b5cdf5393985cbcb19eeb231e6712edca7bbc1af99ea9cad1777b33f473f"}
Feb 02 12:54:01 crc kubenswrapper[4703]: I0202 12:54:01.742431 4703 generic.go:334] "Generic (PLEG): container finished" podID="644a0236-d3fc-404d-a4da-203ca11b1316" containerID="7cb1aa05024f34b1618a4afe4a531706354a0af5e8dacb62af4b5cbec1b67e22" exitCode=0
Feb 02 12:54:01 crc kubenswrapper[4703]: I0202 12:54:01.742495 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" event={"ID":"644a0236-d3fc-404d-a4da-203ca11b1316","Type":"ContainerDied","Data":"7cb1aa05024f34b1618a4afe4a531706354a0af5e8dacb62af4b5cbec1b67e22"}
event={"ID":"644a0236-d3fc-404d-a4da-203ca11b1316","Type":"ContainerDied","Data":"7cb1aa05024f34b1618a4afe4a531706354a0af5e8dacb62af4b5cbec1b67e22"} Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.262807 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.262817 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.263509 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.263558 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.263481 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.264078 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="download-server" containerStatusID={"Type":"cri-o","ID":"053aa37849f0c5f7b518d66a4805047a02129caba4f4dffaf778fbb12cd0d9a9"} pod="openshift-console/downloads-7954f5f757-wwmdc" containerMessage="Container download-server failed liveness probe, will be restarted" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.264162 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" containerID="cri-o://053aa37849f0c5f7b518d66a4805047a02129caba4f4dffaf778fbb12cd0d9a9" gracePeriod=2 Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.264295 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.264319 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.294316 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.306184 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openshift-console/console-f9d7485db-bb2nk" Feb 02 12:54:05 crc kubenswrapper[4703]: E0202 12:54:05.399776 4703 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod67d0ea85_5c1d_4420_afaa_0647a6c1eb93.slice/crio-053aa37849f0c5f7b518d66a4805047a02129caba4f4dffaf778fbb12cd0d9a9.scope\": RecentStats: unable to find data in memory cache]" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.665524 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.672796 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.700396 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-99594499c-zjrgs"] Feb 02 12:54:05 crc kubenswrapper[4703]: E0202 12:54:05.704001 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a6e53b52-19fe-43a8-99c4-20424253942d" containerName="pruner" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704134 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a6e53b52-19fe-43a8-99c4-20424253942d" containerName="pruner" Feb 02 12:54:05 crc kubenswrapper[4703]: E0202 12:54:05.704180 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704192 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" Feb 02 12:54:05 crc kubenswrapper[4703]: E0202 12:54:05.704236 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerName="route-controller-manager" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704246 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerName="route-controller-manager" Feb 02 12:54:05 crc kubenswrapper[4703]: E0202 12:54:05.704302 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa448388-6971-479d-a86e-1429a06f8f4a" containerName="pruner" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704312 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa448388-6971-479d-a86e-1429a06f8f4a" containerName="pruner" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704400 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tml5b\" (UniqueName: \"kubernetes.io/projected/644a0236-d3fc-404d-a4da-203ca11b1316-kube-api-access-tml5b\") pod \"644a0236-d3fc-404d-a4da-203ca11b1316\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704595 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644a0236-d3fc-404d-a4da-203ca11b1316-serving-cert\") pod \"644a0236-d3fc-404d-a4da-203ca11b1316\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704645 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: 
\"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-client-ca\") pod \"a9a7821e-59f7-414f-bebc-9fab34813bbc\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704672 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-t5s2l\" (UniqueName: \"kubernetes.io/projected/a9a7821e-59f7-414f-bebc-9fab34813bbc-kube-api-access-t5s2l\") pod \"a9a7821e-59f7-414f-bebc-9fab34813bbc\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704701 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-config\") pod \"a9a7821e-59f7-414f-bebc-9fab34813bbc\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704795 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-client-ca\") pod \"644a0236-d3fc-404d-a4da-203ca11b1316\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.704831 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9a7821e-59f7-414f-bebc-9fab34813bbc-serving-cert\") pod \"a9a7821e-59f7-414f-bebc-9fab34813bbc\" (UID: \"a9a7821e-59f7-414f-bebc-9fab34813bbc\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.705086 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-proxy-ca-bundles\") pod \"644a0236-d3fc-404d-a4da-203ca11b1316\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.705158 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-config\") pod \"644a0236-d3fc-404d-a4da-203ca11b1316\" (UID: \"644a0236-d3fc-404d-a4da-203ca11b1316\") " Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.707082 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-config" (OuterVolumeSpecName: "config") pod "a9a7821e-59f7-414f-bebc-9fab34813bbc" (UID: "a9a7821e-59f7-414f-bebc-9fab34813bbc"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.707898 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-client-ca" (OuterVolumeSpecName: "client-ca") pod "a9a7821e-59f7-414f-bebc-9fab34813bbc" (UID: "a9a7821e-59f7-414f-bebc-9fab34813bbc"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.709151 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "644a0236-d3fc-404d-a4da-203ca11b1316" (UID: "644a0236-d3fc-404d-a4da-203ca11b1316"). InnerVolumeSpecName "proxy-ca-bundles". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.709968 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-client-ca" (OuterVolumeSpecName: "client-ca") pod "644a0236-d3fc-404d-a4da-203ca11b1316" (UID: "644a0236-d3fc-404d-a4da-203ca11b1316"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.711952 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-config" (OuterVolumeSpecName: "config") pod "644a0236-d3fc-404d-a4da-203ca11b1316" (UID: "644a0236-d3fc-404d-a4da-203ca11b1316"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.713669 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.716744 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a9a7821e-59f7-414f-bebc-9fab34813bbc-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "a9a7821e-59f7-414f-bebc-9fab34813bbc" (UID: "a9a7821e-59f7-414f-bebc-9fab34813bbc"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.717002 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/644a0236-d3fc-404d-a4da-203ca11b1316-kube-api-access-tml5b" (OuterVolumeSpecName: "kube-api-access-tml5b") pod "644a0236-d3fc-404d-a4da-203ca11b1316" (UID: "644a0236-d3fc-404d-a4da-203ca11b1316"). InnerVolumeSpecName "kube-api-access-tml5b". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.717039 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a9a7821e-59f7-414f-bebc-9fab34813bbc-kube-api-access-t5s2l" (OuterVolumeSpecName: "kube-api-access-t5s2l") pod "a9a7821e-59f7-414f-bebc-9fab34813bbc" (UID: "a9a7821e-59f7-414f-bebc-9fab34813bbc"). InnerVolumeSpecName "kube-api-access-t5s2l". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.720199 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.720247 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerName="route-controller-manager" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.720316 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa448388-6971-479d-a86e-1429a06f8f4a" containerName="pruner" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.720337 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a6e53b52-19fe-43a8-99c4-20424253942d" containerName="pruner" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.720792 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/644a0236-d3fc-404d-a4da-203ca11b1316-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "644a0236-d3fc-404d-a4da-203ca11b1316" (UID: "644a0236-d3fc-404d-a4da-203ca11b1316"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.722185 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-99594499c-zjrgs"] Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.722375 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.776386 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" event={"ID":"644a0236-d3fc-404d-a4da-203ca11b1316","Type":"ContainerDied","Data":"b90e25cf06afe529b73104c71b9033bf89a6790aa2169ccbb5a40bd785f07785"} Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.776451 4703 scope.go:117] "RemoveContainer" containerID="7cb1aa05024f34b1618a4afe4a531706354a0af5e8dacb62af4b5cbec1b67e22" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.776458 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.784617 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" event={"ID":"a9a7821e-59f7-414f-bebc-9fab34813bbc","Type":"ContainerDied","Data":"97c6c6e68f43964d0092778626602db8d35dd37a1cf164633ea1c56459e40939"} Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.784713 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.787134 4703 generic.go:334] "Generic (PLEG): container finished" podID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerID="053aa37849f0c5f7b518d66a4805047a02129caba4f4dffaf778fbb12cd0d9a9" exitCode=0 Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.787217 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wwmdc" event={"ID":"67d0ea85-5c1d-4420-afaa-0647a6c1eb93","Type":"ContainerDied","Data":"053aa37849f0c5f7b518d66a4805047a02129caba4f4dffaf778fbb12cd0d9a9"} Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.799253 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7clns"] Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.802764 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-879f6c89f-7clns"] Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.817121 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9s7f6\" (UniqueName: \"kubernetes.io/projected/f5a934be-4111-493e-812f-df34f48d3d5b-kube-api-access-9s7f6\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.817236 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-config\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.819671 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820000 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-client-ca\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820069 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5a934be-4111-493e-812f-df34f48d3d5b-serving-cert\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820150 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-proxy-ca-bundles\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820247 4703 reconciler_common.go:293] "Volume detached for 
volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820264 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/a9a7821e-59f7-414f-bebc-9fab34813bbc-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820290 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.820427 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/644a0236-d3fc-404d-a4da-203ca11b1316-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.821187 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tml5b\" (UniqueName: \"kubernetes.io/projected/644a0236-d3fc-404d-a4da-203ca11b1316-kube-api-access-tml5b\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.821215 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/644a0236-d3fc-404d-a4da-203ca11b1316-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.821227 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/a9a7821e-59f7-414f-bebc-9fab34813bbc-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.821256 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-t5s2l\" (UniqueName: \"kubernetes.io/projected/a9a7821e-59f7-414f-bebc-9fab34813bbc-kube-api-access-t5s2l\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.821520 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld"] Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.824053 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld"] Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.922456 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-config\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.922537 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-client-ca\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.922563 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5a934be-4111-493e-812f-df34f48d3d5b-serving-cert\") pod \"controller-manager-99594499c-zjrgs\" (UID: 
\"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.922592 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-proxy-ca-bundles\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.922639 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9s7f6\" (UniqueName: \"kubernetes.io/projected/f5a934be-4111-493e-812f-df34f48d3d5b-kube-api-access-9s7f6\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.923778 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-client-ca\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.923912 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-config\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.924264 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-proxy-ca-bundles\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.928746 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5a934be-4111-493e-812f-df34f48d3d5b-serving-cert\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.941227 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" path="/var/lib/kubelet/pods/644a0236-d3fc-404d-a4da-203ca11b1316/volumes" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.941950 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" path="/var/lib/kubelet/pods/a9a7821e-59f7-414f-bebc-9fab34813bbc/volumes" Feb 02 12:54:05 crc kubenswrapper[4703]: I0202 12:54:05.948792 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9s7f6\" (UniqueName: \"kubernetes.io/projected/f5a934be-4111-493e-812f-df34f48d3d5b-kube-api-access-9s7f6\") pod \"controller-manager-99594499c-zjrgs\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:06 crc 
Feb 02 12:54:06 crc kubenswrapper[4703]: I0202 12:54:06.254357 4703 patch_prober.go:28] interesting pod/controller-manager-879f6c89f-7clns container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 02 12:54:06 crc kubenswrapper[4703]: I0202 12:54:06.254425 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-879f6c89f-7clns" podUID="644a0236-d3fc-404d-a4da-203ca11b1316" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.5:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 02 12:54:06 crc kubenswrapper[4703]: I0202 12:54:06.264207 4703 patch_prober.go:28] interesting pod/route-controller-manager-6576b87f9c-jhwld container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)" start-of-body=
Feb 02 12:54:06 crc kubenswrapper[4703]: I0202 12:54:06.264293 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-6576b87f9c-jhwld" podUID="a9a7821e-59f7-414f-bebc-9fab34813bbc" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.6:8443/healthz\": net/http: request canceled while waiting for connection (Client.Timeout exceeded while awaiting headers)"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.164379 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"]
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.166438 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.169174 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.169826 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.170002 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.170120 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.171671 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.172132 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.189463 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"]
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.262354 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-config\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.262416 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ppp4d\" (UniqueName: \"kubernetes.io/projected/46014f77-bb1b-4879-8fbf-0bef4b6199fa-kube-api-access-ppp4d\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.262438 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-client-ca\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.262459 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46014f77-bb1b-4879-8fbf-0bef4b6199fa-serving-cert\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.369491 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-config\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.369607 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ppp4d\" (UniqueName: \"kubernetes.io/projected/46014f77-bb1b-4879-8fbf-0bef4b6199fa-kube-api-access-ppp4d\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.369639 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-client-ca\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.369664 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46014f77-bb1b-4879-8fbf-0bef4b6199fa-serving-cert\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.370879 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-client-ca\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.373572 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46014f77-bb1b-4879-8fbf-0bef4b6199fa-serving-cert\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.386883 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ppp4d\" (UniqueName: \"kubernetes.io/projected/46014f77-bb1b-4879-8fbf-0bef4b6199fa-kube-api-access-ppp4d\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.389952 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-config\") pod \"route-controller-manager-797d5dd8c7-hhkf6\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:08 crc kubenswrapper[4703]: I0202 12:54:08.492443 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" Feb 02 12:54:15 crc kubenswrapper[4703]: I0202 12:54:15.262993 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:15 crc kubenswrapper[4703]: I0202 12:54:15.263381 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:15 crc kubenswrapper[4703]: I0202 12:54:15.985301 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 12:54:15 crc kubenswrapper[4703]: I0202 12:54:15.985366 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 12:54:16 crc kubenswrapper[4703]: I0202 12:54:16.586360 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-operator-lifecycle-manager/package-server-manager-789f6589d5-x766n" Feb 02 12:54:20 crc kubenswrapper[4703]: I0202 12:54:20.033348 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-99594499c-zjrgs"] Feb 02 12:54:20 crc kubenswrapper[4703]: I0202 12:54:20.137612 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"] Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.223103 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.224921 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.236053 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver"/"installer-sa-dockercfg-5pr6n" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.236174 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver"/"kube-root-ca.crt" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.239966 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.266836 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2befffc6-fe81-4d23-aa1d-0a2824048c85-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.266953 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2befffc6-fe81-4d23-aa1d-0a2824048c85-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.369035 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2befffc6-fe81-4d23-aa1d-0a2824048c85-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.369115 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2befffc6-fe81-4d23-aa1d-0a2824048c85-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.369172 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2befffc6-fe81-4d23-aa1d-0a2824048c85-kubelet-dir\") pod \"revision-pruner-9-crc\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.387303 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2befffc6-fe81-4d23-aa1d-0a2824048c85-kube-api-access\") pod \"revision-pruner-9-crc\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:23 crc kubenswrapper[4703]: I0202 12:54:23.570694 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:24 crc kubenswrapper[4703]: I0202 12:54:24.192817 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-network-diagnostics/network-check-target-xd92c" Feb 02 12:54:25 crc kubenswrapper[4703]: I0202 12:54:25.264668 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:25 crc kubenswrapper[4703]: I0202 12:54:25.264761 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:26 crc kubenswrapper[4703]: E0202 12:54:26.325167 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 12:54:26 crc kubenswrapper[4703]: E0202 12:54:26.325406 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-zwnl7,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-rzrkb_openshift-marketplace(0331857a-b571-4128-9927-b2cc5dd58969): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:26 crc kubenswrapper[4703]: E0202 12:54:26.327225 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-rzrkb" 
podUID="0331857a-b571-4128-9927-b2cc5dd58969" Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.748186 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-rzrkb" podUID="0331857a-b571-4128-9927-b2cc5dd58969" Feb 02 12:54:28 crc kubenswrapper[4703]: I0202 12:54:28.820123 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 12:54:28 crc kubenswrapper[4703]: I0202 12:54:28.824373 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:28 crc kubenswrapper[4703]: I0202 12:54:28.826165 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.865897 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.866101 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-vpwgc,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-z28wg_openshift-marketplace(869c519e-8d0f-41fb-9f62-ae66f567003f): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.867233 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-z28wg" 
podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.875224 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-operator-index:v4.18" Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.876227 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-tnprx,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-operators-jwrr8_openshift-marketplace(6a692829-23d1-4ed4-954d-33ded09c5a25): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:28 crc kubenswrapper[4703]: E0202 12:54:28.877485 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-operators-jwrr8" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" Feb 02 12:54:28 crc kubenswrapper[4703]: I0202 12:54:28.967589 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-var-lock\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:28 crc kubenswrapper[4703]: I0202 12:54:28.968020 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kube-api-access\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:28 crc kubenswrapper[4703]: I0202 12:54:28.968046 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.069031 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kube-api-access\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.069099 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.069180 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-var-lock\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.069395 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-var-lock\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.069755 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kubelet-dir\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.096907 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kube-api-access\") pod \"installer-9-crc\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:29 crc kubenswrapper[4703]: I0202 12:54:29.191970 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.839986 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-z28wg" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.840064 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-operator-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-operators-jwrr8" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.923335 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.923673 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-qlwwm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-4qklh_openshift-marketplace(15647656-be1e-49d7-92dd-880ca1fd4d31): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.925062 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/certified-operator-index:v4.18" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.925060 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with 
ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-4qklh" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.925166 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/certified-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-b58px,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod certified-operators-kbjrd_openshift-marketplace(ae6cfab9-b3ad-4791-bb0f-08b264c22a3d): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.926419 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/certified-operators-kbjrd" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.952227 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/community-operator-index:v4.18" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.952490 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/community-operator-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-kb4p8,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod community-operators-zf6z4_openshift-marketplace(28bd7c5d-2710-4c7c-af7b-f594c8d3352c): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:30 crc kubenswrapper[4703]: E0202 12:54:30.953891 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/community-operators-zf6z4" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.056737 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-4qklh" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.056746 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/community-operator-index:v4.18\\\"\"" pod="openshift-marketplace/community-operators-zf6z4" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.056813 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/certified-operator-index:v4.18\\\"\"" pod="openshift-marketplace/certified-operators-kbjrd" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.064431 4703 scope.go:117] "RemoveContainer" containerID="6541b5cdf5393985cbcb19eeb231e6712edca7bbc1af99ea9cad1777b33f473f" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.128686 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context 
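The recurring "rpc error: code = Canceled ... context canceled" above is a client-side context cancellation surfacing through a gRPC status code. A sketch of the pattern; pullImage below is a stand-in for the real CRI call, not the CRI client itself:

package main

import (
	"context"
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// pullImage stands in for a CRI PullImage RPC: it reports the context's
// cancellation the way a gRPC client call would.
func pullImage(ctx context.Context, image string) error {
	<-ctx.Done()
	return status.Error(codes.Canceled, "copying config: "+ctx.Err().Error())
}

func main() {
	ctx, cancel := context.WithCancel(context.Background())
	cancel() // e.g. the pod worker gave up on this attempt
	err := pullImage(ctx, "registry.redhat.io/redhat/redhat-operator-index:v4.18")
	if status.Code(err) == codes.Canceled {
		fmt.Println("PullImage from image service failed:", err)
	}
}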
canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.129146 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache --cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-854hb,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-wn5cj_openshift-marketplace(c6f1054c-67cd-46bb-b781-62716d75231a): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.130701 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-wn5cj" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.173059 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" image="registry.redhat.io/redhat/redhat-marketplace-index:v4.18" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.173231 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="init container &Container{Name:extract-content,Image:registry.redhat.io/redhat/redhat-marketplace-index:v4.18,Command:[/utilities/copy-content],Args:[--catalog.from=/configs --catalog.to=/extracted-catalog/catalog --cache.from=/tmp/cache 
--cache.to=/extracted-catalog/cache],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:utilities,ReadOnly:false,MountPath:/utilities,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:catalog-content,ReadOnly:false,MountPath:/extracted-catalog,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8t5z5,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:Always,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[ALL],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*1000170000,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:*false,RunAsGroup:nil,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:FallbackToLogsOnError,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod redhat-marketplace-7c4qb_openshift-marketplace(7b2bcd29-093a-439f-b3ed-e2aef1ae6904): ErrImagePull: rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled" logger="UnhandledError" Feb 02 12:54:32 crc kubenswrapper[4703]: E0202 12:54:32.174891 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ErrImagePull: \"rpc error: code = Canceled desc = copying system image from manifest list: copying config: context canceled\"" pod="openshift-marketplace/redhat-marketplace-7c4qb" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.551097 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-multus/network-metrics-daemon-n2htj"] Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.556867 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"] Feb 02 12:54:32 crc kubenswrapper[4703]: W0202 12:54:32.569743 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod0c2e7c06_8c42_4bc7_bcd5_611a6ad8fd60.slice/crio-673c03ed0345b7701f9ab1f04ca173f849a00bdbe4c4c287dae9416a1a605582 WatchSource:0}: Error finding container 673c03ed0345b7701f9ab1f04ca173f849a00bdbe4c4c287dae9416a1a605582: Status 404 returned error can't find the container with id 673c03ed0345b7701f9ab1f04ca173f849a00bdbe4c4c287dae9416a1a605582 Feb 02 12:54:32 crc kubenswrapper[4703]: W0202 12:54:32.570881 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod46014f77_bb1b_4879_8fbf_0bef4b6199fa.slice/crio-b83d7eefa6e9b0d8e6d2fed5405bea71a1d2801c649ff5ce922facbf589fca5b WatchSource:0}: Error finding container b83d7eefa6e9b0d8e6d2fed5405bea71a1d2801c649ff5ce922facbf589fca5b: Status 404 returned error can't find the container with id b83d7eefa6e9b0d8e6d2fed5405bea71a1d2801c649ff5ce922facbf589fca5b Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.634085 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" 
pods=["openshift-kube-apiserver/revision-pruner-9-crc"] Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.645860 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-kube-apiserver/installer-9-crc"] Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.653560 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-99594499c-zjrgs"] Feb 02 12:54:32 crc kubenswrapper[4703]: W0202 12:54:32.673348 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf5a934be_4111_493e_812f_df34f48d3d5b.slice/crio-b495039db99b1417cc408f8f727e51a3dc4f15cd5564a2bf97ff84291b59aecf WatchSource:0}: Error finding container b495039db99b1417cc408f8f727e51a3dc4f15cd5564a2bf97ff84291b59aecf: Status 404 returned error can't find the container with id b495039db99b1417cc408f8f727e51a3dc4f15cd5564a2bf97ff84291b59aecf Feb 02 12:54:32 crc kubenswrapper[4703]: W0202 12:54:32.674261 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-podbd22e13b_0430_4cb9_8c95_ffbce79855cf.slice/crio-24162ff10f6a2a8e3809c41c97e2adcd7f38c9dc48f819450568e9fbff8df111 WatchSource:0}: Error finding container 24162ff10f6a2a8e3809c41c97e2adcd7f38c9dc48f819450568e9fbff8df111: Status 404 returned error can't find the container with id 24162ff10f6a2a8e3809c41c97e2adcd7f38c9dc48f819450568e9fbff8df111 Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.965965 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-n2htj" event={"ID":"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60","Type":"ContainerStarted","Data":"b37a5c578d2532c9f1843eacfeb1213c5e04826966aaa2d357737430de1074c8"} Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.966390 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-n2htj" event={"ID":"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60","Type":"ContainerStarted","Data":"673c03ed0345b7701f9ab1f04ca173f849a00bdbe4c4c287dae9416a1a605582"} Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.974841 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2befffc6-fe81-4d23-aa1d-0a2824048c85","Type":"ContainerStarted","Data":"bc4034a82505cbd9b97c5ffa5a87af5333a3d2b3542779f12c976905186d019e"} Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.978029 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" event={"ID":"46014f77-bb1b-4879-8fbf-0bef4b6199fa","Type":"ContainerStarted","Data":"105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f"} Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.978083 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" event={"ID":"46014f77-bb1b-4879-8fbf-0bef4b6199fa","Type":"ContainerStarted","Data":"b83d7eefa6e9b0d8e6d2fed5405bea71a1d2801c649ff5ce922facbf589fca5b"} Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.978180 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" podUID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" containerName="route-controller-manager" containerID="cri-o://105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f" gracePeriod=30 Feb 02 12:54:32 
Feb 02 12:54:32 crc kubenswrapper[4703]: I0202 12:54:32.978494 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.006224 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" podStartSLOduration=33.00620258 podStartE2EDuration="33.00620258s" podCreationTimestamp="2026-02-02 12:54:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:54:33.002312487 +0000 UTC m=+200.017520021" watchObservedRunningTime="2026-02-02 12:54:33.00620258 +0000 UTC m=+200.021410104"
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.019301 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body=
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.019362 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused"
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.018654 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-console/downloads-7954f5f757-wwmdc" event={"ID":"67d0ea85-5c1d-4420-afaa-0647a6c1eb93","Type":"ContainerStarted","Data":"8f8910d924fbc4198e41c46db620120b535d8eac9bcd8124bb4e6234c12aa272"}
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.020035 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-console/downloads-7954f5f757-wwmdc"
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.035701 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bd22e13b-0430-4cb9-8c95-ffbce79855cf","Type":"ContainerStarted","Data":"24162ff10f6a2a8e3809c41c97e2adcd7f38c9dc48f819450568e9fbff8df111"}
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.038196 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" event={"ID":"f5a934be-4111-493e-812f-df34f48d3d5b","Type":"ContainerStarted","Data":"970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae"}
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.038338 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" event={"ID":"f5a934be-4111-493e-812f-df34f48d3d5b","Type":"ContainerStarted","Data":"b495039db99b1417cc408f8f727e51a3dc4f15cd5564a2bf97ff84291b59aecf"}
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.038253 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" podUID="f5a934be-4111-493e-812f-df34f48d3d5b" containerName="controller-manager" containerID="cri-o://970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae" gracePeriod=30
Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.039296 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs"
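podStartSLOduration above is simply the gap between podCreationTimestamp and observedRunningTime (about 33s here). Reproducing the arithmetic from the timestamps in that entry:

package main

import (
	"fmt"
	"time"
)

func main() {
	// Timestamps taken from the "Observed pod startup duration" entry above.
	created, _ := time.Parse(time.RFC3339, "2026-02-02T12:54:00Z")
	observed, _ := time.Parse("2006-01-02 15:04:05.999999999 -0700 MST",
		"2026-02-02 12:54:33.002312487 +0000 UTC")
	fmt.Println("podStartSLOduration ~=", observed.Sub(created)) // ~33.002s
}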
pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:33 crc kubenswrapper[4703]: E0202 12:54:33.044090 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-wn5cj" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" Feb 02 12:54:33 crc kubenswrapper[4703]: E0202 12:54:33.051578 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"extract-content\" with ImagePullBackOff: \"Back-off pulling image \\\"registry.redhat.io/redhat/redhat-marketplace-index:v4.18\\\"\"" pod="openshift-marketplace/redhat-marketplace-7c4qb" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.070399 4703 patch_prober.go:28] interesting pod/controller-manager-99594499c-zjrgs container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.54:8443/healthz\": read tcp 10.217.0.2:47670->10.217.0.54:8443: read: connection reset by peer" start-of-body= Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.070469 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" podUID="f5a934be-4111-493e-812f-df34f48d3d5b" containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.54:8443/healthz\": read tcp 10.217.0.2:47670->10.217.0.54:8443: read: connection reset by peer" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.102966 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" podStartSLOduration=33.102942238 podStartE2EDuration="33.102942238s" podCreationTimestamp="2026-02-02 12:54:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:54:33.097442763 +0000 UTC m=+200.112650317" watchObservedRunningTime="2026-02-02 12:54:33.102942238 +0000 UTC m=+200.118149772" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.264079 4703 patch_prober.go:28] interesting pod/route-controller-manager-797d5dd8c7-hhkf6 container/route-controller-manager namespace/openshift-route-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.55:8443/healthz\": read tcp 10.217.0.2:41966->10.217.0.55:8443: read: connection reset by peer" start-of-body= Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.264343 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" podUID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" containerName="route-controller-manager" probeResult="failure" output="Get \"https://10.217.0.55:8443/healthz\": read tcp 10.217.0.2:41966->10.217.0.55:8443: read: connection reset by peer" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.459170 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.494634 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5"] Feb 02 12:54:33 crc kubenswrapper[4703]: E0202 12:54:33.494872 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f5a934be-4111-493e-812f-df34f48d3d5b" containerName="controller-manager" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.494884 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f5a934be-4111-493e-812f-df34f48d3d5b" containerName="controller-manager" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.496463 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f5a934be-4111-493e-812f-df34f48d3d5b" containerName="controller-manager" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.496884 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.514046 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5"] Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.533447 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-config\") pod \"f5a934be-4111-493e-812f-df34f48d3d5b\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.533505 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9s7f6\" (UniqueName: \"kubernetes.io/projected/f5a934be-4111-493e-812f-df34f48d3d5b-kube-api-access-9s7f6\") pod \"f5a934be-4111-493e-812f-df34f48d3d5b\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.533536 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5a934be-4111-493e-812f-df34f48d3d5b-serving-cert\") pod \"f5a934be-4111-493e-812f-df34f48d3d5b\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.533603 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-client-ca\") pod \"f5a934be-4111-493e-812f-df34f48d3d5b\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.533744 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-proxy-ca-bundles\") pod \"f5a934be-4111-493e-812f-df34f48d3d5b\" (UID: \"f5a934be-4111-493e-812f-df34f48d3d5b\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.535408 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-client-ca" (OuterVolumeSpecName: "client-ca") pod "f5a934be-4111-493e-812f-df34f48d3d5b" (UID: "f5a934be-4111-493e-812f-df34f48d3d5b"). InnerVolumeSpecName "client-ca". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.535421 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "f5a934be-4111-493e-812f-df34f48d3d5b" (UID: "f5a934be-4111-493e-812f-df34f48d3d5b"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.535528 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-config" (OuterVolumeSpecName: "config") pod "f5a934be-4111-493e-812f-df34f48d3d5b" (UID: "f5a934be-4111-493e-812f-df34f48d3d5b"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.540091 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f5a934be-4111-493e-812f-df34f48d3d5b-kube-api-access-9s7f6" (OuterVolumeSpecName: "kube-api-access-9s7f6") pod "f5a934be-4111-493e-812f-df34f48d3d5b" (UID: "f5a934be-4111-493e-812f-df34f48d3d5b"). InnerVolumeSpecName "kube-api-access-9s7f6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.546802 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-797d5dd8c7-hhkf6_46014f77-bb1b-4879-8fbf-0bef4b6199fa/route-controller-manager/0.log" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.547093 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.547388 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/f5a934be-4111-493e-812f-df34f48d3d5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "f5a934be-4111-493e-812f-df34f48d3d5b" (UID: "f5a934be-4111-493e-812f-df34f48d3d5b"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635334 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46014f77-bb1b-4879-8fbf-0bef4b6199fa-serving-cert\") pod \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635398 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-client-ca\") pod \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635468 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-config\") pod \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635495 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ppp4d\" (UniqueName: \"kubernetes.io/projected/46014f77-bb1b-4879-8fbf-0bef4b6199fa-kube-api-access-ppp4d\") pod \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\" (UID: \"46014f77-bb1b-4879-8fbf-0bef4b6199fa\") " Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635804 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-client-ca\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635837 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-proxy-ca-bundles\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635871 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/feb9f6b4-8be8-414a-add7-3f3f2e777ade-serving-cert\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635898 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q4wr9\" (UniqueName: \"kubernetes.io/projected/feb9f6b4-8be8-414a-add7-3f3f2e777ade-kube-api-access-q4wr9\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635923 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-config\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: 
\"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635973 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635983 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.635993 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9s7f6\" (UniqueName: \"kubernetes.io/projected/f5a934be-4111-493e-812f-df34f48d3d5b-kube-api-access-9s7f6\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.636009 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/f5a934be-4111-493e-812f-df34f48d3d5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.636088 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/f5a934be-4111-493e-812f-df34f48d3d5b-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.636524 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-client-ca" (OuterVolumeSpecName: "client-ca") pod "46014f77-bb1b-4879-8fbf-0bef4b6199fa" (UID: "46014f77-bb1b-4879-8fbf-0bef4b6199fa"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.637510 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-config" (OuterVolumeSpecName: "config") pod "46014f77-bb1b-4879-8fbf-0bef4b6199fa" (UID: "46014f77-bb1b-4879-8fbf-0bef4b6199fa"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.639433 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/46014f77-bb1b-4879-8fbf-0bef4b6199fa-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "46014f77-bb1b-4879-8fbf-0bef4b6199fa" (UID: "46014f77-bb1b-4879-8fbf-0bef4b6199fa"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.639798 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/46014f77-bb1b-4879-8fbf-0bef4b6199fa-kube-api-access-ppp4d" (OuterVolumeSpecName: "kube-api-access-ppp4d") pod "46014f77-bb1b-4879-8fbf-0bef4b6199fa" (UID: "46014f77-bb1b-4879-8fbf-0bef4b6199fa"). InnerVolumeSpecName "kube-api-access-ppp4d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737456 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-client-ca\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737509 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-proxy-ca-bundles\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737540 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/feb9f6b4-8be8-414a-add7-3f3f2e777ade-serving-cert\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737565 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q4wr9\" (UniqueName: \"kubernetes.io/projected/feb9f6b4-8be8-414a-add7-3f3f2e777ade-kube-api-access-q4wr9\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737593 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-config\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737647 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/46014f77-bb1b-4879-8fbf-0bef4b6199fa-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737657 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737666 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/46014f77-bb1b-4879-8fbf-0bef4b6199fa-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.737675 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ppp4d\" (UniqueName: \"kubernetes.io/projected/46014f77-bb1b-4879-8fbf-0bef4b6199fa-kube-api-access-ppp4d\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.738641 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-client-ca\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " 
pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.738713 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-proxy-ca-bundles\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.740436 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-config\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.753594 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/feb9f6b4-8be8-414a-add7-3f3f2e777ade-serving-cert\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.756493 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q4wr9\" (UniqueName: \"kubernetes.io/projected/feb9f6b4-8be8-414a-add7-3f3f2e777ade-kube-api-access-q4wr9\") pod \"controller-manager-54bd5fc9d-wxpt5\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:33 crc kubenswrapper[4703]: I0202 12:54:33.816619 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.034530 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5"] Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.053190 4703 generic.go:334] "Generic (PLEG): container finished" podID="f5a934be-4111-493e-812f-df34f48d3d5b" containerID="970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae" exitCode=0 Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.053263 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" event={"ID":"f5a934be-4111-493e-812f-df34f48d3d5b","Type":"ContainerDied","Data":"970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.053293 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.053329 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-99594499c-zjrgs" event={"ID":"f5a934be-4111-493e-812f-df34f48d3d5b","Type":"ContainerDied","Data":"b495039db99b1417cc408f8f727e51a3dc4f15cd5564a2bf97ff84291b59aecf"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.053347 4703 scope.go:117] "RemoveContainer" containerID="970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.060762 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/network-metrics-daemon-n2htj" event={"ID":"0c2e7c06-8c42-4bc7-bcd5-611a6ad8fd60","Type":"ContainerStarted","Data":"eca94eb754ca1a819622722b52fdeea40dc720adcd6afe9eb9f787babebbcd89"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.064215 4703 generic.go:334] "Generic (PLEG): container finished" podID="2befffc6-fe81-4d23-aa1d-0a2824048c85" containerID="20dc597b9a40b06fb01b200a31f8fe63f5c5f0bc955b37c6c6b98ec9d9ad6c99" exitCode=0 Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.064314 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2befffc6-fe81-4d23-aa1d-0a2824048c85","Type":"ContainerDied","Data":"20dc597b9a40b06fb01b200a31f8fe63f5c5f0bc955b37c6c6b98ec9d9ad6c99"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.071429 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-99594499c-zjrgs"] Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.071844 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-route-controller-manager_route-controller-manager-797d5dd8c7-hhkf6_46014f77-bb1b-4879-8fbf-0bef4b6199fa/route-controller-manager/0.log" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.071898 4703 generic.go:334] "Generic (PLEG): container finished" podID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" containerID="105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f" exitCode=255 Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.071954 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" event={"ID":"46014f77-bb1b-4879-8fbf-0bef4b6199fa","Type":"ContainerDied","Data":"105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.072003 4703 util.go:48] "No ready sandbox for pod can be found. 
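The Generic (PLEG) and SyncLoop (PLEG) entries above are the kubelet's internal pod lifecycle event stream. From outside the node, the nearest equivalent is a watch on pod status through the API server, e.g. with client-go; the kubeconfig path below is a placeholder:

package main

import (
	"context"
	"fmt"

	corev1 "k8s.io/api/core/v1"
	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
	"k8s.io/client-go/kubernetes"
	"k8s.io/client-go/tools/clientcmd"
)

func main() {
	// Placeholder kubeconfig path; adjust for the cluster at hand.
	cfg, err := clientcmd.BuildConfigFromFlags("", "/path/to/kubeconfig")
	if err != nil {
		panic(err)
	}
	client := kubernetes.NewForConfigOrDie(cfg)
	w, err := client.CoreV1().Pods("openshift-controller-manager").
		Watch(context.Background(), metav1.ListOptions{})
	if err != nil {
		panic(err)
	}
	for ev := range w.ResultChan() {
		// Roughly analogous to the SyncLoop ADD/UPDATE/DELETE entries above.
		if pod, ok := ev.Object.(*corev1.Pod); ok {
			fmt.Printf("%s pod=%s/%s phase=%s\n", ev.Type, pod.Namespace, pod.Name, pod.Status.Phase)
		}
	}
}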
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.072015 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6" event={"ID":"46014f77-bb1b-4879-8fbf-0bef4b6199fa","Type":"ContainerDied","Data":"b83d7eefa6e9b0d8e6d2fed5405bea71a1d2801c649ff5ce922facbf589fca5b"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.073783 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" event={"ID":"feb9f6b4-8be8-414a-add7-3f3f2e777ade","Type":"ContainerStarted","Data":"34b5cb2b38640125ed32e7d22e051914b00281c77e1e88daf9f674de2cff02d3"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.074469 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-99594499c-zjrgs"] Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.076551 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bd22e13b-0430-4cb9-8c95-ffbce79855cf","Type":"ContainerStarted","Data":"dee4d94f4f54a8926ae1d82de9dd1b6d8cc6db1850945b1f5b41cdcb967a6838"} Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.076700 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.076735 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.087625 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-multus/network-metrics-daemon-n2htj" podStartSLOduration=179.087609364 podStartE2EDuration="2m59.087609364s" podCreationTimestamp="2026-02-02 12:51:35 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:54:34.085743764 +0000 UTC m=+201.100951308" watchObservedRunningTime="2026-02-02 12:54:34.087609364 +0000 UTC m=+201.102816898" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.089977 4703 scope.go:117] "RemoveContainer" containerID="970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae" Feb 02 12:54:34 crc kubenswrapper[4703]: E0202 12:54:34.091417 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae\": container with ID starting with 970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae not found: ID does not exist" containerID="970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.091442 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae"} err="failed to get container status 
\"970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae\": rpc error: code = NotFound desc = could not find container \"970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae\": container with ID starting with 970b483ea8d41e43c6d53549c92841ec835d34305349d9d0311d0aa70260f8ae not found: ID does not exist" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.091460 4703 scope.go:117] "RemoveContainer" containerID="105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.122987 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/installer-9-crc" podStartSLOduration=6.122965528 podStartE2EDuration="6.122965528s" podCreationTimestamp="2026-02-02 12:54:28 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:54:34.114639398 +0000 UTC m=+201.129846932" watchObservedRunningTime="2026-02-02 12:54:34.122965528 +0000 UTC m=+201.138173062" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.168248 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"] Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.171660 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-797d5dd8c7-hhkf6"] Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.178113 4703 scope.go:117] "RemoveContainer" containerID="105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f" Feb 02 12:54:34 crc kubenswrapper[4703]: E0202 12:54:34.178757 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f\": container with ID starting with 105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f not found: ID does not exist" containerID="105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f" Feb 02 12:54:34 crc kubenswrapper[4703]: I0202 12:54:34.178808 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f"} err="failed to get container status \"105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f\": rpc error: code = NotFound desc = could not find container \"105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f\": container with ID starting with 105b59aa6dcc0036218ef55a7980c12d62c05f57f41936e983390b2995454e4f not found: ID does not exist" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.081728 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" event={"ID":"feb9f6b4-8be8-414a-add7-3f3f2e777ade","Type":"ContainerStarted","Data":"37013255ae2b44a1f015b35aed61fc0fb685ecca108ceb40967f2410875b0854"} Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.082497 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.092169 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.104702 4703 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" podStartSLOduration=15.104682625 podStartE2EDuration="15.104682625s" podCreationTimestamp="2026-02-02 12:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:54:35.103820402 +0000 UTC m=+202.119027956" watchObservedRunningTime="2026-02-02 12:54:35.104682625 +0000 UTC m=+202.119890159" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.263234 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Readiness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.263571 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.263835 4703 patch_prober.go:28] interesting pod/downloads-7954f5f757-wwmdc container/download-server namespace/openshift-console: Liveness probe status=failure output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" start-of-body= Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.263853 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-console/downloads-7954f5f757-wwmdc" podUID="67d0ea85-5c1d-4420-afaa-0647a6c1eb93" containerName="download-server" probeResult="failure" output="Get \"http://10.217.0.10:8080/\": dial tcp 10.217.0.10:8080: connect: connection refused" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.382336 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.463799 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2befffc6-fe81-4d23-aa1d-0a2824048c85-kube-api-access\") pod \"2befffc6-fe81-4d23-aa1d-0a2824048c85\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.463913 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2befffc6-fe81-4d23-aa1d-0a2824048c85-kubelet-dir\") pod \"2befffc6-fe81-4d23-aa1d-0a2824048c85\" (UID: \"2befffc6-fe81-4d23-aa1d-0a2824048c85\") " Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.464054 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2befffc6-fe81-4d23-aa1d-0a2824048c85-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "2befffc6-fe81-4d23-aa1d-0a2824048c85" (UID: "2befffc6-fe81-4d23-aa1d-0a2824048c85"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.464208 4703 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/2befffc6-fe81-4d23-aa1d-0a2824048c85-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.469362 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2befffc6-fe81-4d23-aa1d-0a2824048c85-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "2befffc6-fe81-4d23-aa1d-0a2824048c85" (UID: "2befffc6-fe81-4d23-aa1d-0a2824048c85"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.565457 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/2befffc6-fe81-4d23-aa1d-0a2824048c85-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.941805 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" path="/var/lib/kubelet/pods/46014f77-bb1b-4879-8fbf-0bef4b6199fa/volumes" Feb 02 12:54:35 crc kubenswrapper[4703]: I0202 12:54:35.942540 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f5a934be-4111-493e-812f-df34f48d3d5b" path="/var/lib/kubelet/pods/f5a934be-4111-493e-812f-df34f48d3d5b/volumes" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.092134 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/revision-pruner-9-crc" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.092124 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/revision-pruner-9-crc" event={"ID":"2befffc6-fe81-4d23-aa1d-0a2824048c85","Type":"ContainerDied","Data":"bc4034a82505cbd9b97c5ffa5a87af5333a3d2b3542779f12c976905186d019e"} Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.092191 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="bc4034a82505cbd9b97c5ffa5a87af5333a3d2b3542779f12c976905186d019e" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.193463 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m"] Feb 02 12:54:36 crc kubenswrapper[4703]: E0202 12:54:36.194402 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2befffc6-fe81-4d23-aa1d-0a2824048c85" containerName="pruner" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.194527 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2befffc6-fe81-4d23-aa1d-0a2824048c85" containerName="pruner" Feb 02 12:54:36 crc kubenswrapper[4703]: E0202 12:54:36.194610 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" containerName="route-controller-manager" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.194681 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" containerName="route-controller-manager" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.194875 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="46014f77-bb1b-4879-8fbf-0bef4b6199fa" containerName="route-controller-manager" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.194945 4703 
memory_manager.go:354] "RemoveStaleState removing state" podUID="2befffc6-fe81-4d23-aa1d-0a2824048c85" containerName="pruner" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.195434 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.197778 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.198034 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.198192 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.200796 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.200843 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.201597 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.206363 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m"] Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.274360 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4pkk6\" (UniqueName: \"kubernetes.io/projected/18617984-4e98-4008-ab3f-4d448f8a4f07-kube-api-access-4pkk6\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.274449 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18617984-4e98-4008-ab3f-4d448f8a4f07-serving-cert\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.274495 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-config\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.274516 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-client-ca\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: 
I0202 12:54:36.375502 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4pkk6\" (UniqueName: \"kubernetes.io/projected/18617984-4e98-4008-ab3f-4d448f8a4f07-kube-api-access-4pkk6\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.375550 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18617984-4e98-4008-ab3f-4d448f8a4f07-serving-cert\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.375582 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-config\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.375619 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-client-ca\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.376753 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-client-ca\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.377571 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-config\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.384735 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18617984-4e98-4008-ab3f-4d448f8a4f07-serving-cert\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.406051 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4pkk6\" (UniqueName: \"kubernetes.io/projected/18617984-4e98-4008-ab3f-4d448f8a4f07-kube-api-access-4pkk6\") pod \"route-controller-manager-779ff649cb-k8j8m\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.529959 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:36 crc kubenswrapper[4703]: I0202 12:54:36.914841 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m"] Feb 02 12:54:36 crc kubenswrapper[4703]: W0202 12:54:36.920336 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod18617984_4e98_4008_ab3f_4d448f8a4f07.slice/crio-1a557bb16743a47ed4061cee817f63565d5d31f32cd6b8fd37ca80ac993ff64a WatchSource:0}: Error finding container 1a557bb16743a47ed4061cee817f63565d5d31f32cd6b8fd37ca80ac993ff64a: Status 404 returned error can't find the container with id 1a557bb16743a47ed4061cee817f63565d5d31f32cd6b8fd37ca80ac993ff64a Feb 02 12:54:37 crc kubenswrapper[4703]: I0202 12:54:37.100822 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" event={"ID":"18617984-4e98-4008-ab3f-4d448f8a4f07","Type":"ContainerStarted","Data":"1a557bb16743a47ed4061cee817f63565d5d31f32cd6b8fd37ca80ac993ff64a"} Feb 02 12:54:38 crc kubenswrapper[4703]: I0202 12:54:38.111467 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" event={"ID":"18617984-4e98-4008-ab3f-4d448f8a4f07","Type":"ContainerStarted","Data":"d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045"} Feb 02 12:54:38 crc kubenswrapper[4703]: I0202 12:54:38.111822 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:38 crc kubenswrapper[4703]: I0202 12:54:38.117513 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:54:38 crc kubenswrapper[4703]: I0202 12:54:38.144261 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" podStartSLOduration=18.144236194 podStartE2EDuration="18.144236194s" podCreationTimestamp="2026-02-02 12:54:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:54:38.133979762 +0000 UTC m=+205.149187316" watchObservedRunningTime="2026-02-02 12:54:38.144236194 +0000 UTC m=+205.159443718" Feb 02 12:54:38 crc kubenswrapper[4703]: I0202 12:54:38.569089 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-m25rz"] Feb 02 12:54:44 crc kubenswrapper[4703]: I0202 12:54:44.162621 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerStarted","Data":"d1f79b97d4b4e76caf43ac07d34154e990756e663deb1760e360fd37626c60ae"} Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.172934 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerStarted","Data":"a3eec60f3b1144aad07846a462ac362058cac3af72ff62e7ef964f80f6676308"} Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.179415 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerStarted","Data":"aab0997c56c0062bc25c3c3e7d0e3494866fefa75dfb386781781c65bb984fc2"} Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.181284 4703 generic.go:334] "Generic (PLEG): container finished" podID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerID="d1f79b97d4b4e76caf43ac07d34154e990756e663deb1760e360fd37626c60ae" exitCode=0 Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.181345 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerDied","Data":"d1f79b97d4b4e76caf43ac07d34154e990756e663deb1760e360fd37626c60ae"} Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.183498 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerStarted","Data":"557a7b680a3b7c85ce50e22133d18f1ec2ed7ab52bda65a4272bcf8248d49257"} Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.270389 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-console/downloads-7954f5f757-wwmdc" Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.985245 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.985325 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.985374 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.986042 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 12:54:45 crc kubenswrapper[4703]: I0202 12:54:45.986096 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96" gracePeriod=600 Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.195664 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96" exitCode=0 Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.195742 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" 
event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96"} Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.198074 4703 generic.go:334] "Generic (PLEG): container finished" podID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerID="557a7b680a3b7c85ce50e22133d18f1ec2ed7ab52bda65a4272bcf8248d49257" exitCode=0 Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.198225 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerDied","Data":"557a7b680a3b7c85ce50e22133d18f1ec2ed7ab52bda65a4272bcf8248d49257"} Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.200040 4703 generic.go:334] "Generic (PLEG): container finished" podID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerID="a3eec60f3b1144aad07846a462ac362058cac3af72ff62e7ef964f80f6676308" exitCode=0 Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.200091 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerDied","Data":"a3eec60f3b1144aad07846a462ac362058cac3af72ff62e7ef964f80f6676308"} Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.204609 4703 generic.go:334] "Generic (PLEG): container finished" podID="0331857a-b571-4128-9927-b2cc5dd58969" containerID="aab0997c56c0062bc25c3c3e7d0e3494866fefa75dfb386781781c65bb984fc2" exitCode=0 Feb 02 12:54:47 crc kubenswrapper[4703]: I0202 12:54:47.204651 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerDied","Data":"aab0997c56c0062bc25c3c3e7d0e3494866fefa75dfb386781781c65bb984fc2"} Feb 02 12:54:48 crc kubenswrapper[4703]: I0202 12:54:48.211992 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"75d8d8d4aca4ff712a827b14263bdea2a7cde424db43d84a36afba035ca666b3"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.234744 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerStarted","Data":"4352b631e36a635d35fb1ea278f35e10e9e1f9e66e312a0e6d3ec61e173353ae"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.237768 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerStarted","Data":"ffd051a1410d380e0073a5797f166eb8ca738539748f83fd58acc1ab1e3d0b77"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.241460 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerStarted","Data":"cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.244013 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerStarted","Data":"a596dbd293b0cfa248382ad3a8344281598b01f6da1e5ddd14844f54a3eda9b7"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 
12:54:51.246801 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerStarted","Data":"af2a2368d9fb50020651acde1c3ac3f21de8bf90c4dd9b1d43e8769e48d10985"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.249216 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerStarted","Data":"8e1a5d5d4f5a724821134712f71e25f210d6f87c07fc0501162aea0707a35798"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.251650 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerStarted","Data":"f3a3f1c75cf560db7ce3523779ccdbd6b9e69c9d344ec577a3be4f9ec262686e"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.253942 4703 generic.go:334] "Generic (PLEG): container finished" podID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerID="1b29849f4cf9b8ee6eeb897f955c2cafb2bd05e3d5660507396d4db5cd3c9b2f" exitCode=0 Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.253990 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7c4qb" event={"ID":"7b2bcd29-093a-439f-b3ed-e2aef1ae6904","Type":"ContainerDied","Data":"1b29849f4cf9b8ee6eeb897f955c2cafb2bd05e3d5660507396d4db5cd3c9b2f"} Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.259708 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-kbjrd" podStartSLOduration=4.191366841 podStartE2EDuration="1m7.259692246s" podCreationTimestamp="2026-02-02 12:53:44 +0000 UTC" firstStartedPulling="2026-02-02 12:53:47.081679379 +0000 UTC m=+154.096886913" lastFinishedPulling="2026-02-02 12:54:50.150004784 +0000 UTC m=+217.165212318" observedRunningTime="2026-02-02 12:54:51.25758432 +0000 UTC m=+218.272791874" watchObservedRunningTime="2026-02-02 12:54:51.259692246 +0000 UTC m=+218.274899780" Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.306547 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-z28wg" podStartSLOduration=3.023355172 podStartE2EDuration="1m6.306524054s" podCreationTimestamp="2026-02-02 12:53:45 +0000 UTC" firstStartedPulling="2026-02-02 12:53:47.090048674 +0000 UTC m=+154.105256208" lastFinishedPulling="2026-02-02 12:54:50.373217556 +0000 UTC m=+217.388425090" observedRunningTime="2026-02-02 12:54:51.30336072 +0000 UTC m=+218.318568264" watchObservedRunningTime="2026-02-02 12:54:51.306524054 +0000 UTC m=+218.321731588" Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.334076 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-rzrkb" podStartSLOduration=4.4028573 podStartE2EDuration="1m4.334061532s" podCreationTimestamp="2026-02-02 12:53:47 +0000 UTC" firstStartedPulling="2026-02-02 12:53:50.238896194 +0000 UTC m=+157.254103728" lastFinishedPulling="2026-02-02 12:54:50.170100426 +0000 UTC m=+217.185307960" observedRunningTime="2026-02-02 12:54:51.330928309 +0000 UTC m=+218.346135843" watchObservedRunningTime="2026-02-02 12:54:51.334061532 +0000 UTC m=+218.349269066" Feb 02 12:54:51 crc kubenswrapper[4703]: I0202 12:54:51.370285 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" 
pod="openshift-marketplace/community-operators-zf6z4" podStartSLOduration=4.351631493 podStartE2EDuration="1m7.370247889s" podCreationTimestamp="2026-02-02 12:53:44 +0000 UTC" firstStartedPulling="2026-02-02 12:53:47.07017885 +0000 UTC m=+154.085386394" lastFinishedPulling="2026-02-02 12:54:50.088795256 +0000 UTC m=+217.104002790" observedRunningTime="2026-02-02 12:54:51.369744936 +0000 UTC m=+218.384952470" watchObservedRunningTime="2026-02-02 12:54:51.370247889 +0000 UTC m=+218.385455423" Feb 02 12:54:52 crc kubenswrapper[4703]: I0202 12:54:52.262874 4703 generic.go:334] "Generic (PLEG): container finished" podID="c6f1054c-67cd-46bb-b781-62716d75231a" containerID="8e1a5d5d4f5a724821134712f71e25f210d6f87c07fc0501162aea0707a35798" exitCode=0 Feb 02 12:54:52 crc kubenswrapper[4703]: I0202 12:54:52.262997 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerDied","Data":"8e1a5d5d4f5a724821134712f71e25f210d6f87c07fc0501162aea0707a35798"} Feb 02 12:54:53 crc kubenswrapper[4703]: I0202 12:54:53.278244 4703 generic.go:334] "Generic (PLEG): container finished" podID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerID="a596dbd293b0cfa248382ad3a8344281598b01f6da1e5ddd14844f54a3eda9b7" exitCode=0 Feb 02 12:54:53 crc kubenswrapper[4703]: I0202 12:54:53.278319 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerDied","Data":"a596dbd293b0cfa248382ad3a8344281598b01f6da1e5ddd14844f54a3eda9b7"} Feb 02 12:54:54 crc kubenswrapper[4703]: I0202 12:54:54.286473 4703 generic.go:334] "Generic (PLEG): container finished" podID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerID="cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0" exitCode=0 Feb 02 12:54:54 crc kubenswrapper[4703]: I0202 12:54:54.286557 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerDied","Data":"cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0"} Feb 02 12:54:55 crc kubenswrapper[4703]: I0202 12:54:55.168549 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:54:55 crc kubenswrapper[4703]: I0202 12:54:55.168608 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:54:55 crc kubenswrapper[4703]: I0202 12:54:55.427801 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:54:55 crc kubenswrapper[4703]: I0202 12:54:55.427870 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:54:55 crc kubenswrapper[4703]: I0202 12:54:55.575510 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:54:55 crc kubenswrapper[4703]: I0202 12:54:55.575916 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:54:56 crc kubenswrapper[4703]: I0202 12:54:56.264113 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-z28wg" Feb 
02 12:54:56 crc kubenswrapper[4703]: I0202 12:54:56.265559 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:54:56 crc kubenswrapper[4703]: I0202 12:54:56.268804 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:54:56 crc kubenswrapper[4703]: I0202 12:54:56.314606 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:54:56 crc kubenswrapper[4703]: I0202 12:54:56.356056 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:54:56 crc kubenswrapper[4703]: I0202 12:54:56.360491 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:54:57 crc kubenswrapper[4703]: I0202 12:54:57.304228 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7c4qb" event={"ID":"7b2bcd29-093a-439f-b3ed-e2aef1ae6904","Type":"ContainerStarted","Data":"7146b5a7e28d5f2788438a1896aff9e2b7a08a0c9cf929c4c06954b9ee11193b"} Feb 02 12:54:58 crc kubenswrapper[4703]: I0202 12:54:58.328662 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-7c4qb" podStartSLOduration=6.636123116 podStartE2EDuration="1m12.328641854s" podCreationTimestamp="2026-02-02 12:53:46 +0000 UTC" firstStartedPulling="2026-02-02 12:53:49.178500064 +0000 UTC m=+156.193707598" lastFinishedPulling="2026-02-02 12:54:54.871018802 +0000 UTC m=+221.886226336" observedRunningTime="2026-02-02 12:54:58.327284298 +0000 UTC m=+225.342491852" watchObservedRunningTime="2026-02-02 12:54:58.328641854 +0000 UTC m=+225.343849388" Feb 02 12:54:58 crc kubenswrapper[4703]: I0202 12:54:58.365460 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:54:58 crc kubenswrapper[4703]: I0202 12:54:58.365516 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:54:58 crc kubenswrapper[4703]: I0202 12:54:58.404692 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:54:58 crc kubenswrapper[4703]: I0202 12:54:58.959640 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z28wg"] Feb 02 12:54:58 crc kubenswrapper[4703]: I0202 12:54:58.959854 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-z28wg" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="registry-server" containerID="cri-o://ffd051a1410d380e0073a5797f166eb8ca738539748f83fd58acc1ab1e3d0b77" gracePeriod=2 Feb 02 12:54:59 crc kubenswrapper[4703]: I0202 12:54:59.367890 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.035116 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5"] Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.035425 4703 kuberuntime_container.go:808] "Killing container with a grace period" 
pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" podUID="feb9f6b4-8be8-414a-add7-3f3f2e777ade" containerName="controller-manager" containerID="cri-o://37013255ae2b44a1f015b35aed61fc0fb685ecca108ceb40967f2410875b0854" gracePeriod=30 Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.139443 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m"] Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.141025 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" podUID="18617984-4e98-4008-ab3f-4d448f8a4f07" containerName="route-controller-manager" containerID="cri-o://d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045" gracePeriod=30 Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.334323 4703 generic.go:334] "Generic (PLEG): container finished" podID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerID="ffd051a1410d380e0073a5797f166eb8ca738539748f83fd58acc1ab1e3d0b77" exitCode=0 Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.334406 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerDied","Data":"ffd051a1410d380e0073a5797f166eb8ca738539748f83fd58acc1ab1e3d0b77"} Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.368579 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.446795 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vpwgc\" (UniqueName: \"kubernetes.io/projected/869c519e-8d0f-41fb-9f62-ae66f567003f-kube-api-access-vpwgc\") pod \"869c519e-8d0f-41fb-9f62-ae66f567003f\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.446914 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-catalog-content\") pod \"869c519e-8d0f-41fb-9f62-ae66f567003f\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.446954 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-utilities\") pod \"869c519e-8d0f-41fb-9f62-ae66f567003f\" (UID: \"869c519e-8d0f-41fb-9f62-ae66f567003f\") " Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.448046 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-utilities" (OuterVolumeSpecName: "utilities") pod "869c519e-8d0f-41fb-9f62-ae66f567003f" (UID: "869c519e-8d0f-41fb-9f62-ae66f567003f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.458566 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/869c519e-8d0f-41fb-9f62-ae66f567003f-kube-api-access-vpwgc" (OuterVolumeSpecName: "kube-api-access-vpwgc") pod "869c519e-8d0f-41fb-9f62-ae66f567003f" (UID: "869c519e-8d0f-41fb-9f62-ae66f567003f"). InnerVolumeSpecName "kube-api-access-vpwgc". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.501555 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "869c519e-8d0f-41fb-9f62-ae66f567003f" (UID: "869c519e-8d0f-41fb-9f62-ae66f567003f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.548895 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vpwgc\" (UniqueName: \"kubernetes.io/projected/869c519e-8d0f-41fb-9f62-ae66f567003f-kube-api-access-vpwgc\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.548937 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:00 crc kubenswrapper[4703]: I0202 12:55:00.548947 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/869c519e-8d0f-41fb-9f62-ae66f567003f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.146695 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.226413 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77"] Feb 02 12:55:01 crc kubenswrapper[4703]: E0202 12:55:01.226865 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="18617984-4e98-4008-ab3f-4d448f8a4f07" containerName="route-controller-manager" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.226973 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="18617984-4e98-4008-ab3f-4d448f8a4f07" containerName="route-controller-manager" Feb 02 12:55:01 crc kubenswrapper[4703]: E0202 12:55:01.231353 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="extract-utilities" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.231486 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="extract-utilities" Feb 02 12:55:01 crc kubenswrapper[4703]: E0202 12:55:01.231558 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="registry-server" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.231623 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="registry-server" Feb 02 12:55:01 crc kubenswrapper[4703]: E0202 12:55:01.232068 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="extract-content" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.232233 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="extract-content" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.232577 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" containerName="registry-server" Feb 
02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.232673 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="18617984-4e98-4008-ab3f-4d448f8a4f07" containerName="route-controller-manager" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.233236 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.234592 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77"] Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.257801 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-client-ca\") pod \"18617984-4e98-4008-ab3f-4d448f8a4f07\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.257854 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18617984-4e98-4008-ab3f-4d448f8a4f07-serving-cert\") pod \"18617984-4e98-4008-ab3f-4d448f8a4f07\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.257884 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-config\") pod \"18617984-4e98-4008-ab3f-4d448f8a4f07\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.257924 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4pkk6\" (UniqueName: \"kubernetes.io/projected/18617984-4e98-4008-ab3f-4d448f8a4f07-kube-api-access-4pkk6\") pod \"18617984-4e98-4008-ab3f-4d448f8a4f07\" (UID: \"18617984-4e98-4008-ab3f-4d448f8a4f07\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.260100 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-client-ca" (OuterVolumeSpecName: "client-ca") pod "18617984-4e98-4008-ab3f-4d448f8a4f07" (UID: "18617984-4e98-4008-ab3f-4d448f8a4f07"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.260299 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-config" (OuterVolumeSpecName: "config") pod "18617984-4e98-4008-ab3f-4d448f8a4f07" (UID: "18617984-4e98-4008-ab3f-4d448f8a4f07"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.261582 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/18617984-4e98-4008-ab3f-4d448f8a4f07-kube-api-access-4pkk6" (OuterVolumeSpecName: "kube-api-access-4pkk6") pod "18617984-4e98-4008-ab3f-4d448f8a4f07" (UID: "18617984-4e98-4008-ab3f-4d448f8a4f07"). InnerVolumeSpecName "kube-api-access-4pkk6". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.262573 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/18617984-4e98-4008-ab3f-4d448f8a4f07-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "18617984-4e98-4008-ab3f-4d448f8a4f07" (UID: "18617984-4e98-4008-ab3f-4d448f8a4f07"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.344509 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerStarted","Data":"715292648aa3faed93659ce0d29b5a61ff52bfd76908000aa01f5f0fc1603981"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.346547 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerStarted","Data":"cf46d9c3973c96634ec200117964f20c05825d91f91007940b87b47d8b338a80"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.350132 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerStarted","Data":"84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.355979 4703 generic.go:334] "Generic (PLEG): container finished" podID="feb9f6b4-8be8-414a-add7-3f3f2e777ade" containerID="37013255ae2b44a1f015b35aed61fc0fb685ecca108ceb40967f2410875b0854" exitCode=0 Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.356015 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" event={"ID":"feb9f6b4-8be8-414a-add7-3f3f2e777ade","Type":"ContainerDied","Data":"37013255ae2b44a1f015b35aed61fc0fb685ecca108ceb40967f2410875b0854"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.358413 4703 generic.go:334] "Generic (PLEG): container finished" podID="18617984-4e98-4008-ab3f-4d448f8a4f07" containerID="d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045" exitCode=0 Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.358472 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.358499 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" event={"ID":"18617984-4e98-4008-ab3f-4d448f8a4f07","Type":"ContainerDied","Data":"d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.358529 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m" event={"ID":"18617984-4e98-4008-ab3f-4d448f8a4f07","Type":"ContainerDied","Data":"1a557bb16743a47ed4061cee817f63565d5d31f32cd6b8fd37ca80ac993ff64a"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.358548 4703 scope.go:117] "RemoveContainer" containerID="d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359046 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4hjf7\" (UniqueName: \"kubernetes.io/projected/cf096749-b5ec-4fde-a26f-32372db74d5b-kube-api-access-4hjf7\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359092 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf096749-b5ec-4fde-a26f-32372db74d5b-serving-cert\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359156 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-config\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359196 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-client-ca\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359250 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359265 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/18617984-4e98-4008-ab3f-4d448f8a4f07-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359295 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/18617984-4e98-4008-ab3f-4d448f8a4f07-config\") on node 
\"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.359309 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4pkk6\" (UniqueName: \"kubernetes.io/projected/18617984-4e98-4008-ab3f-4d448f8a4f07-kube-api-access-4pkk6\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.365905 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-z28wg" event={"ID":"869c519e-8d0f-41fb-9f62-ae66f567003f","Type":"ContainerDied","Data":"d9158735b42076e56358baec39dac37c64d64b5f0dbc40222014a3b76857c4b1"} Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.366021 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-z28wg" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.373823 4703 scope.go:117] "RemoveContainer" containerID="d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045" Feb 02 12:55:01 crc kubenswrapper[4703]: E0202 12:55:01.376765 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045\": container with ID starting with d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045 not found: ID does not exist" containerID="d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.376801 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045"} err="failed to get container status \"d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045\": rpc error: code = NotFound desc = could not find container \"d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045\": container with ID starting with d905f0f2942cef040f3df0c1ba7979c8e638426f05acf9b7f1ad3f19f2612045 not found: ID does not exist" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.376837 4703 scope.go:117] "RemoveContainer" containerID="ffd051a1410d380e0073a5797f166eb8ca738539748f83fd58acc1ab1e3d0b77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.383153 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-4qklh" podStartSLOduration=2.542183746 podStartE2EDuration="1m16.383137377s" podCreationTimestamp="2026-02-02 12:53:45 +0000 UTC" firstStartedPulling="2026-02-02 12:53:47.127658446 +0000 UTC m=+154.142865980" lastFinishedPulling="2026-02-02 12:55:00.968612077 +0000 UTC m=+227.983819611" observedRunningTime="2026-02-02 12:55:01.378063843 +0000 UTC m=+228.393271397" watchObservedRunningTime="2026-02-02 12:55:01.383137377 +0000 UTC m=+228.398344911" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.400265 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m"] Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.400559 4703 scope.go:117] "RemoveContainer" containerID="a3eec60f3b1144aad07846a462ac362058cac3af72ff62e7ef964f80f6676308" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.417544 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-779ff649cb-k8j8m"] Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.431328 4703 scope.go:117] 
"RemoveContainer" containerID="a260c39214e69c38c4521a147e4262d8e3738fbf85c1396a7975290ae5e5c7db" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.449908 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-jwrr8" podStartSLOduration=2.746632172 podStartE2EDuration="1m13.449891072s" podCreationTimestamp="2026-02-02 12:53:48 +0000 UTC" firstStartedPulling="2026-02-02 12:53:50.269117226 +0000 UTC m=+157.284324760" lastFinishedPulling="2026-02-02 12:55:00.972376136 +0000 UTC m=+227.987583660" observedRunningTime="2026-02-02 12:55:01.428921428 +0000 UTC m=+228.444128962" watchObservedRunningTime="2026-02-02 12:55:01.449891072 +0000 UTC m=+228.465098606" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.459902 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4hjf7\" (UniqueName: \"kubernetes.io/projected/cf096749-b5ec-4fde-a26f-32372db74d5b-kube-api-access-4hjf7\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.459982 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf096749-b5ec-4fde-a26f-32372db74d5b-serving-cert\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.460010 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-config\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.460036 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-client-ca\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.463947 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-config\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.465763 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf096749-b5ec-4fde-a26f-32372db74d5b-serving-cert\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.470047 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-wn5cj" podStartSLOduration=3.563322981 
podStartE2EDuration="1m14.470028975s" podCreationTimestamp="2026-02-02 12:53:47 +0000 UTC" firstStartedPulling="2026-02-02 12:53:49.267889318 +0000 UTC m=+156.283096852" lastFinishedPulling="2026-02-02 12:55:00.174595312 +0000 UTC m=+227.189802846" observedRunningTime="2026-02-02 12:55:01.447715555 +0000 UTC m=+228.462923089" watchObservedRunningTime="2026-02-02 12:55:01.470028975 +0000 UTC m=+228.485236509" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.472781 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-client-ca\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.472853 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-z28wg"] Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.474896 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-z28wg"] Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.477845 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4hjf7\" (UniqueName: \"kubernetes.io/projected/cf096749-b5ec-4fde-a26f-32372db74d5b-kube-api-access-4hjf7\") pod \"route-controller-manager-68884dbf87-gmb77\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.558418 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.775281 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.866958 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-proxy-ca-bundles\") pod \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.867045 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-q4wr9\" (UniqueName: \"kubernetes.io/projected/feb9f6b4-8be8-414a-add7-3f3f2e777ade-kube-api-access-q4wr9\") pod \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.867079 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-config\") pod \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.867126 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/feb9f6b4-8be8-414a-add7-3f3f2e777ade-serving-cert\") pod \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.867167 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-client-ca\") pod \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\" (UID: \"feb9f6b4-8be8-414a-add7-3f3f2e777ade\") " Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.867987 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "feb9f6b4-8be8-414a-add7-3f3f2e777ade" (UID: "feb9f6b4-8be8-414a-add7-3f3f2e777ade"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.868062 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-config" (OuterVolumeSpecName: "config") pod "feb9f6b4-8be8-414a-add7-3f3f2e777ade" (UID: "feb9f6b4-8be8-414a-add7-3f3f2e777ade"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.868094 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-client-ca" (OuterVolumeSpecName: "client-ca") pod "feb9f6b4-8be8-414a-add7-3f3f2e777ade" (UID: "feb9f6b4-8be8-414a-add7-3f3f2e777ade"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.874464 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/feb9f6b4-8be8-414a-add7-3f3f2e777ade-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "feb9f6b4-8be8-414a-add7-3f3f2e777ade" (UID: "feb9f6b4-8be8-414a-add7-3f3f2e777ade"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.874479 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/feb9f6b4-8be8-414a-add7-3f3f2e777ade-kube-api-access-q4wr9" (OuterVolumeSpecName: "kube-api-access-q4wr9") pod "feb9f6b4-8be8-414a-add7-3f3f2e777ade" (UID: "feb9f6b4-8be8-414a-add7-3f3f2e777ade"). InnerVolumeSpecName "kube-api-access-q4wr9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.940750 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="18617984-4e98-4008-ab3f-4d448f8a4f07" path="/var/lib/kubelet/pods/18617984-4e98-4008-ab3f-4d448f8a4f07/volumes" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.941389 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="869c519e-8d0f-41fb-9f62-ae66f567003f" path="/var/lib/kubelet/pods/869c519e-8d0f-41fb-9f62-ae66f567003f/volumes" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.969288 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.969331 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-q4wr9\" (UniqueName: \"kubernetes.io/projected/feb9f6b4-8be8-414a-add7-3f3f2e777ade-kube-api-access-q4wr9\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.969348 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.969362 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/feb9f6b4-8be8-414a-add7-3f3f2e777ade-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:01 crc kubenswrapper[4703]: I0202 12:55:01.969372 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/feb9f6b4-8be8-414a-add7-3f3f2e777ade-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.005513 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77"] Feb 02 12:55:02 crc kubenswrapper[4703]: W0202 12:55:02.013451 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcf096749_b5ec_4fde_a26f_32372db74d5b.slice/crio-75ab47849cd180a71c95d25fe9fc3f7ee12d016318f821cfc82e661028716952 WatchSource:0}: Error finding container 75ab47849cd180a71c95d25fe9fc3f7ee12d016318f821cfc82e661028716952: Status 404 returned error can't find the container with id 75ab47849cd180a71c95d25fe9fc3f7ee12d016318f821cfc82e661028716952 Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.373909 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" event={"ID":"feb9f6b4-8be8-414a-add7-3f3f2e777ade","Type":"ContainerDied","Data":"34b5cb2b38640125ed32e7d22e051914b00281c77e1e88daf9f674de2cff02d3"} Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.373950 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5" Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.373968 4703 scope.go:117] "RemoveContainer" containerID="37013255ae2b44a1f015b35aed61fc0fb685ecca108ceb40967f2410875b0854" Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.381939 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" event={"ID":"cf096749-b5ec-4fde-a26f-32372db74d5b","Type":"ContainerStarted","Data":"454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362"} Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.381988 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" event={"ID":"cf096749-b5ec-4fde-a26f-32372db74d5b","Type":"ContainerStarted","Data":"75ab47849cd180a71c95d25fe9fc3f7ee12d016318f821cfc82e661028716952"} Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.382218 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.391973 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5"] Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.397666 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-54bd5fc9d-wxpt5"] Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.426642 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" podStartSLOduration=2.4266198279999998 podStartE2EDuration="2.426619828s" podCreationTimestamp="2026-02-02 12:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:55:02.421815591 +0000 UTC m=+229.437023135" watchObservedRunningTime="2026-02-02 12:55:02.426619828 +0000 UTC m=+229.441827362" Feb 02 12:55:02 crc kubenswrapper[4703]: I0202 12:55:02.801354 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.221112 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv"] Feb 02 12:55:03 crc kubenswrapper[4703]: E0202 12:55:03.221426 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="feb9f6b4-8be8-414a-add7-3f3f2e777ade" containerName="controller-manager" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.221446 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="feb9f6b4-8be8-414a-add7-3f3f2e777ade" containerName="controller-manager" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.221575 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="feb9f6b4-8be8-414a-add7-3f3f2e777ade" containerName="controller-manager" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.222055 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.224543 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.225224 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.225368 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.225583 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.225774 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.226186 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.234254 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv"] Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.235019 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.284723 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-proxy-ca-bundles\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.285114 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-client-ca\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.285189 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2de52f-007a-4674-8d59-62a3dd0f52de-serving-cert\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.285487 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-config\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.285532 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-j8vcz\" (UniqueName: 
\"kubernetes.io/projected/6a2de52f-007a-4674-8d59-62a3dd0f52de-kube-api-access-j8vcz\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.386516 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-proxy-ca-bundles\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.387814 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-proxy-ca-bundles\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.387845 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-client-ca\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.387948 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2de52f-007a-4674-8d59-62a3dd0f52de-serving-cert\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.388230 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-config\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.388264 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-j8vcz\" (UniqueName: \"kubernetes.io/projected/6a2de52f-007a-4674-8d59-62a3dd0f52de-kube-api-access-j8vcz\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.389072 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-client-ca\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.389878 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-config\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " 
pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.393757 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2de52f-007a-4674-8d59-62a3dd0f52de-serving-cert\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.404763 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-j8vcz\" (UniqueName: \"kubernetes.io/projected/6a2de52f-007a-4674-8d59-62a3dd0f52de-kube-api-access-j8vcz\") pod \"controller-manager-765cfbfd4f-zmhhv\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.543743 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.598965 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" containerName="oauth-openshift" containerID="cri-o://75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a" gracePeriod=15 Feb 02 12:55:03 crc kubenswrapper[4703]: I0202 12:55:03.943048 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="feb9f6b4-8be8-414a-add7-3f3f2e777ade" path="/var/lib/kubelet/pods/feb9f6b4-8be8-414a-add7-3f3f2e777ade/volumes" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.004700 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv"] Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.087460 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198538 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-cliconfig\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198591 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-router-certs\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198637 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-error\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198660 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-trusted-ca-bundle\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198689 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/721e0407-77c9-416a-9297-6074e75d0a6e-audit-dir\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198765 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-session\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198853 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/721e0407-77c9-416a-9297-6074e75d0a6e-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "audit-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198922 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-login\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.198969 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-serving-cert\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.199649 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-ocp-branding-template\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.199762 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-trusted-ca-bundle" (OuterVolumeSpecName: "v4-0-config-system-trusted-ca-bundle") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-trusted-ca-bundle". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.199977 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-cliconfig" (OuterVolumeSpecName: "v4-0-config-system-cliconfig") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-cliconfig". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200096 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-69kwb\" (UniqueName: \"kubernetes.io/projected/721e0407-77c9-416a-9297-6074e75d0a6e-kube-api-access-69kwb\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200139 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-idp-0-file-data\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200169 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-audit-policies\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200207 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-service-ca\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200246 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-provider-selection\") pod \"721e0407-77c9-416a-9297-6074e75d0a6e\" (UID: \"721e0407-77c9-416a-9297-6074e75d0a6e\") " Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200707 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-cliconfig\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200730 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-trusted-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.200745 4703 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/721e0407-77c9-416a-9297-6074e75d0a6e-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.202246 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-audit-policies" (OuterVolumeSpecName: "audit-policies") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "audit-policies". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.203300 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-service-ca" (OuterVolumeSpecName: "v4-0-config-system-service-ca") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-service-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.206248 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-provider-selection" (OuterVolumeSpecName: "v4-0-config-user-template-provider-selection") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-user-template-provider-selection". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.207624 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-login" (OuterVolumeSpecName: "v4-0-config-user-template-login") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-user-template-login". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.207885 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-error" (OuterVolumeSpecName: "v4-0-config-user-template-error") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-user-template-error". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.210797 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/721e0407-77c9-416a-9297-6074e75d0a6e-kube-api-access-69kwb" (OuterVolumeSpecName: "kube-api-access-69kwb") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "kube-api-access-69kwb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.211032 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-idp-0-file-data" (OuterVolumeSpecName: "v4-0-config-user-idp-0-file-data") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-user-idp-0-file-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.211283 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-serving-cert" (OuterVolumeSpecName: "v4-0-config-system-serving-cert") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.211761 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-session" (OuterVolumeSpecName: "v4-0-config-system-session") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-session". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.212042 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-ocp-branding-template" (OuterVolumeSpecName: "v4-0-config-system-ocp-branding-template") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-ocp-branding-template". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.212076 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-router-certs" (OuterVolumeSpecName: "v4-0-config-system-router-certs") pod "721e0407-77c9-416a-9297-6074e75d0a6e" (UID: "721e0407-77c9-416a-9297-6074e75d0a6e"). InnerVolumeSpecName "v4-0-config-system-router-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301743 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-login\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301782 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301793 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-ocp-branding-template\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301807 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-69kwb\" (UniqueName: \"kubernetes.io/projected/721e0407-77c9-416a-9297-6074e75d0a6e-kube-api-access-69kwb\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301816 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-idp-0-file-data\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301826 4703 reconciler_common.go:293] "Volume detached for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-audit-policies\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301835 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-service-ca\" (UniqueName: 
\"kubernetes.io/configmap/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-service-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301848 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-provider-selection\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301858 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-router-certs\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301867 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-user-template-error\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.301878 4703 reconciler_common.go:293] "Volume detached for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/721e0407-77c9-416a-9297-6074e75d0a6e-v4-0-config-system-session\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.397943 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" event={"ID":"6a2de52f-007a-4674-8d59-62a3dd0f52de","Type":"ContainerStarted","Data":"15fa5088c432718a6f3adad550f8c4399c358ddc92f046de90a42f6d74d2adf8"} Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.399298 4703 generic.go:334] "Generic (PLEG): container finished" podID="721e0407-77c9-416a-9297-6074e75d0a6e" containerID="75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a" exitCode=0 Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.399972 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.402447 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" event={"ID":"721e0407-77c9-416a-9297-6074e75d0a6e","Type":"ContainerDied","Data":"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a"} Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.402486 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-558db77b4-m25rz" event={"ID":"721e0407-77c9-416a-9297-6074e75d0a6e","Type":"ContainerDied","Data":"7b024c2d228fa87093e930574c08a23fb32aa44e45ccf9639e0482ec17f14a4e"} Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.402509 4703 scope.go:117] "RemoveContainer" containerID="75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.434132 4703 scope.go:117] "RemoveContainer" containerID="75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a" Feb 02 12:55:04 crc kubenswrapper[4703]: E0202 12:55:04.434629 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a\": container with ID starting with 75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a not found: ID does not exist" containerID="75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.434662 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a"} err="failed to get container status \"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a\": rpc error: code = NotFound desc = could not find container \"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a\": container with ID starting with 75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a not found: ID does not exist" Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.440300 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-m25rz"] Feb 02 12:55:04 crc kubenswrapper[4703]: I0202 12:55:04.443034 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-authentication/oauth-openshift-558db77b4-m25rz"] Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.407461 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" event={"ID":"6a2de52f-007a-4674-8d59-62a3dd0f52de","Type":"ContainerStarted","Data":"8e21f8cf16a4628f53857f2a8afcde59e2d7bf5a3c9f65a0f5ef3f8e0da0bea2"} Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.407815 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.412419 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.427943 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" podStartSLOduration=5.427926874 podStartE2EDuration="5.427926874s" 
podCreationTimestamp="2026-02-02 12:55:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:55:05.424679198 +0000 UTC m=+232.439886762" watchObservedRunningTime="2026-02-02 12:55:05.427926874 +0000 UTC m=+232.443134408" Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.704963 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.706456 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.743215 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:55:05 crc kubenswrapper[4703]: I0202 12:55:05.939733 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" path="/var/lib/kubelet/pods/721e0407-77c9-416a-9297-6074e75d0a6e/volumes" Feb 02 12:55:06 crc kubenswrapper[4703]: I0202 12:55:06.464606 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.224243 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-authentication/oauth-openshift-7979b4bd5f-8vzls"] Feb 02 12:55:07 crc kubenswrapper[4703]: E0202 12:55:07.224885 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" containerName="oauth-openshift" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.224904 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" containerName="oauth-openshift" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.225065 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="721e0407-77c9-416a-9297-6074e75d0a6e" containerName="oauth-openshift" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.225615 4703 util.go:30] "No sandbox for pod can be found. 
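The RemoveContainer / "ContainerStatus from runtime service failed ... NotFound" pair above (the same pattern appears at 12:55:01) is a benign race: by the time the deletor re-checked the container, it was already gone, so the error is logged and pod cleanup proceeds. A sketch of NotFound-tolerant removal, assuming a gRPC-backed runtime client and the google.golang.org/grpc module:

```go
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer swallows NotFound: a missing container means the desired
// end state (container gone) is already reached.
func removeContainer(remove func(id string) error, id string) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return err
	}
	return nil
}

func main() {
	// Simulate the runtime answering as in the log line above.
	alreadyGone := func(id string) error {
		return status.Error(codes.NotFound, "could not find container "+id)
	}
	if err := removeContainer(alreadyGone,
		"75f0ea1b565dcb24aa050bbe3088f1cf64f369eb31dbba8298fd6df48ed5e64a"); err != nil {
		panic(err)
	}
	fmt.Println("NotFound treated as already removed")
}
```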
Need to start a new one" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.227486 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.228008 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.228414 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.228721 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.228731 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.228769 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.229844 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.230033 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.230257 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.230265 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.230426 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.231078 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.242191 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.247709 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.250776 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7979b4bd5f-8vzls"] Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.264376 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.323201 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.324077 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 
12:55:07.336999 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-login\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337073 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337110 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337144 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337168 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337202 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-service-ca\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337231 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hhvx2\" (UniqueName: \"kubernetes.io/projected/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-kube-api-access-hhvx2\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337319 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-idp-0-file-data\") pod 
\"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337338 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337358 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-audit-policies\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337379 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-error\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337396 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-audit-dir\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337411 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-router-certs\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.337433 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-session\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.364265 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.438486 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.439466 4703 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.439497 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.439533 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-service-ca\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.439559 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hhvx2\" (UniqueName: \"kubernetes.io/projected/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-kube-api-access-hhvx2\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.439648 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440005 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440069 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-audit-policies\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440199 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-error\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440642 4703 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"v4-0-config-system-cliconfig\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-cliconfig\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440670 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-service-ca\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-service-ca\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440803 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-policies\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-audit-policies\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440889 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-trusted-ca-bundle\" (UniqueName: \"kubernetes.io/configmap/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-trusted-ca-bundle\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.440960 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-audit-dir\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.441249 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-audit-dir\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.442219 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-router-certs\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-router-certs\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.442317 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-session\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.442341 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-login\" (UniqueName: 
\"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-login\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.442371 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.444610 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-idp-0-file-data\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-idp-0-file-data\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.445432 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-ocp-branding-template\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-ocp-branding-template\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.445547 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-error\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-error\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.445910 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-login\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-login\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.446260 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-serving-cert\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-serving-cert\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.447201 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-session\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-session\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.447373 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-system-router-certs\" (UniqueName: 
\"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-system-router-certs\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.447648 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"v4-0-config-user-template-provider-selection\" (UniqueName: \"kubernetes.io/secret/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-v4-0-config-user-template-provider-selection\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.460691 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hhvx2\" (UniqueName: \"kubernetes.io/projected/cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1-kube-api-access-hhvx2\") pod \"oauth-openshift-7979b4bd5f-8vzls\" (UID: \"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1\") " pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.475562 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.540237 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.736222 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.736629 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.777070 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:55:07 crc kubenswrapper[4703]: I0202 12:55:07.992295 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-authentication/oauth-openshift-7979b4bd5f-8vzls"] Feb 02 12:55:07 crc kubenswrapper[4703]: W0202 12:55:07.998156 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podcea5c031_f5fc_4cb7_8b95_26fc7eb7c9b1.slice/crio-0c6474a5e98a774088e3415fc5f0bbf5d571b9b8ae0e0fc7e984935415badbb0 WatchSource:0}: Error finding container 0c6474a5e98a774088e3415fc5f0bbf5d571b9b8ae0e0fc7e984935415badbb0: Status 404 returned error can't find the container with id 0c6474a5e98a774088e3415fc5f0bbf5d571b9b8ae0e0fc7e984935415badbb0 Feb 02 12:55:08 crc kubenswrapper[4703]: I0202 12:55:08.433584 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" event={"ID":"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1","Type":"ContainerStarted","Data":"0c6474a5e98a774088e3415fc5f0bbf5d571b9b8ae0e0fc7e984935415badbb0"} Feb 02 12:55:08 crc kubenswrapper[4703]: I0202 12:55:08.472939 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:55:08 crc kubenswrapper[4703]: I0202 12:55:08.756995 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:55:08 crc 
kubenswrapper[4703]: I0202 12:55:08.757058 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:55:08 crc kubenswrapper[4703]: I0202 12:55:08.802597 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:55:09 crc kubenswrapper[4703]: I0202 12:55:09.364388 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-4qklh"] Feb 02 12:55:09 crc kubenswrapper[4703]: I0202 12:55:09.440002 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-4qklh" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="registry-server" containerID="cri-o://715292648aa3faed93659ce0d29b5a61ff52bfd76908000aa01f5f0fc1603981" gracePeriod=2 Feb 02 12:55:09 crc kubenswrapper[4703]: I0202 12:55:09.440413 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" event={"ID":"cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1","Type":"ContainerStarted","Data":"68cf341dcf585b794ee1918e691b9f9224bcff2875e4af87a056405fec45d8bc"} Feb 02 12:55:09 crc kubenswrapper[4703]: I0202 12:55:09.484040 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:55:09 crc kubenswrapper[4703]: I0202 12:55:09.561061 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-wn5cj"] Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.466052 4703 generic.go:334] "Generic (PLEG): container finished" podID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerID="715292648aa3faed93659ce0d29b5a61ff52bfd76908000aa01f5f0fc1603981" exitCode=0 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.466246 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerDied","Data":"715292648aa3faed93659ce0d29b5a61ff52bfd76908000aa01f5f0fc1603981"} Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.466557 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-wn5cj" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="registry-server" containerID="cri-o://cf46d9c3973c96634ec200117964f20c05825d91f91007940b87b47d8b338a80" gracePeriod=2 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.501374 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" podStartSLOduration=32.501237836 podStartE2EDuration="32.501237836s" podCreationTimestamp="2026-02-02 12:54:38 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:55:10.494375654 +0000 UTC m=+237.509583188" watchObservedRunningTime="2026-02-02 12:55:10.501237836 +0000 UTC m=+237.516445400" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.707503 4703 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.708691 4703 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.708845 4703 
util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.708984 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" containerID="cri-o://aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b" gracePeriod=15 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.709039 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" containerID="cri-o://6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495" gracePeriod=15 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.709111 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" containerID="cri-o://c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c" gracePeriod=15 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.709124 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" containerID="cri-o://cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54" gracePeriod=15 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.709121 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" containerID="cri-o://5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d" gracePeriod=15 Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710332 4703 kubelet.go:2421] "SyncLoop ADD" source="file" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710675 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710699 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710712 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710721 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="setup" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710736 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710745 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710753 4703 cpu_manager.go:410] "RemoveStaleState: removing container" 
podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710760 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710773 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710780 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710793 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710801 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710812 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710820 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 12:55:10 crc kubenswrapper[4703]: E0202 12:55:10.710831 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710837 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710951 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-regeneration-controller" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710961 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-insecure-readyz" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710968 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-cert-syncer" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710978 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710987 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.710997 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.711004 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver-check-endpoints" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.805954 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806007 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806052 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806075 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806104 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806127 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806156 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.806174 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.814062 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907482 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907534 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907579 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907616 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907648 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907666 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907687 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907715 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907785 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907822 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lock\" (UniqueName: 
\"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907845 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-audit-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907867 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907888 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907913 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"kube-apiserver-startup-monitor-crc\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907933 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-cert-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:10 crc kubenswrapper[4703]: I0202 12:55:10.907954 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/71bb4a3aecc4ba5b26c4b7318770ce13-resource-dir\") pod \"kube-apiserver-crc\" (UID: \"71bb4a3aecc4ba5b26c4b7318770ce13\") " pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.112127 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:11 crc kubenswrapper[4703]: W0202 12:55:11.157824 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podf85e55b1a89d02b0cb034b1ea31ed45a.slice/crio-acb9ea406e4a52a3408b2b99d2348faaaf7ebf662da38dd4e68a5912f4ce5c2e WatchSource:0}: Error finding container acb9ea406e4a52a3408b2b99d2348faaaf7ebf662da38dd4e68a5912f4ce5c2e: Status 404 returned error can't find the container with id acb9ea406e4a52a3408b2b99d2348faaaf7ebf662da38dd4e68a5912f4ce5c2e Feb 02 12:55:11 crc kubenswrapper[4703]: E0202 12:55:11.162243 4703 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18906f31511028e8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 12:55:11.160699112 +0000 UTC m=+238.175906636,LastTimestamp:2026-02-02 12:55:11.160699112 +0000 UTC m=+238.175906636,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.202692 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.203892 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.204058 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.204221 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.313804 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-catalog-content\") pod \"15647656-be1e-49d7-92dd-880ca1fd4d31\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.313890 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-qlwwm\" (UniqueName: \"kubernetes.io/projected/15647656-be1e-49d7-92dd-880ca1fd4d31-kube-api-access-qlwwm\") pod \"15647656-be1e-49d7-92dd-880ca1fd4d31\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.313911 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-utilities\") pod \"15647656-be1e-49d7-92dd-880ca1fd4d31\" (UID: \"15647656-be1e-49d7-92dd-880ca1fd4d31\") " Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.314992 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-utilities" (OuterVolumeSpecName: "utilities") pod "15647656-be1e-49d7-92dd-880ca1fd4d31" (UID: "15647656-be1e-49d7-92dd-880ca1fd4d31"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.319100 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/15647656-be1e-49d7-92dd-880ca1fd4d31-kube-api-access-qlwwm" (OuterVolumeSpecName: "kube-api-access-qlwwm") pod "15647656-be1e-49d7-92dd-880ca1fd4d31" (UID: "15647656-be1e-49d7-92dd-880ca1fd4d31"). InnerVolumeSpecName "kube-api-access-qlwwm". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.368489 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "15647656-be1e-49d7-92dd-880ca1fd4d31" (UID: "15647656-be1e-49d7-92dd-880ca1fd4d31"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.415869 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.415922 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-qlwwm\" (UniqueName: \"kubernetes.io/projected/15647656-be1e-49d7-92dd-880ca1fd4d31-kube-api-access-qlwwm\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.415935 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/15647656-be1e-49d7-92dd-880ca1fd4d31-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.454349 4703 patch_prober.go:28] interesting pod/kube-apiserver-crc container/kube-apiserver namespace/openshift-kube-apiserver: Readiness probe status=failure output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" start-of-body= Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.454422 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="f4b27818a5e8e43d0dc095d08835c792" containerName="kube-apiserver" probeResult="failure" output="Get \"https://192.168.126.11:6443/readyz\": dial tcp 192.168.126.11:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.481220 4703 generic.go:334] "Generic (PLEG): container finished" podID="c6f1054c-67cd-46bb-b781-62716d75231a" containerID="cf46d9c3973c96634ec200117964f20c05825d91f91007940b87b47d8b338a80" exitCode=0 Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.481293 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerDied","Data":"cf46d9c3973c96634ec200117964f20c05825d91f91007940b87b47d8b338a80"} Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.484244 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-check-endpoints/1.log" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.487050 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.487815 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54" exitCode=0 Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.487844 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" 
containerID="6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495" exitCode=0 Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.487853 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d" exitCode=0 Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.487862 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c" exitCode=2 Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.487932 4703 scope.go:117] "RemoveContainer" containerID="7146e57adf1a9458d2e5308e836edec245144027d343525a7ef37619f1bd9dca" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.489702 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"acb9ea406e4a52a3408b2b99d2348faaaf7ebf662da38dd4e68a5912f4ce5c2e"} Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.492047 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-4qklh" event={"ID":"15647656-be1e-49d7-92dd-880ca1fd4d31","Type":"ContainerDied","Data":"cfa3cedc876fb78ba9c12c247b8e1bdc11826b9f9c00cfc2f339172fb4cd6b57"} Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.492125 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-4qklh" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.492960 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.493251 4703 generic.go:334] "Generic (PLEG): container finished" podID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" containerID="dee4d94f4f54a8926ae1d82de9dd1b6d8cc6db1850945b1f5b41cdcb967a6838" exitCode=0 Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.493294 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.493295 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bd22e13b-0430-4cb9-8c95-ffbce79855cf","Type":"ContainerDied","Data":"dee4d94f4f54a8926ae1d82de9dd1b6d8cc6db1850945b1f5b41cdcb967a6838"} Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.493535 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.494065 4703 status_manager.go:851] "Failed to get status for pod" 
podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.494386 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.494808 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.495139 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.506234 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.506718 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.507050 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.507330 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.692535 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.693476 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.693974 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.694422 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.694699 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.694923 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.820693 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-catalog-content\") pod \"c6f1054c-67cd-46bb-b781-62716d75231a\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.820789 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-utilities\") pod \"c6f1054c-67cd-46bb-b781-62716d75231a\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.820878 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-854hb\" (UniqueName: \"kubernetes.io/projected/c6f1054c-67cd-46bb-b781-62716d75231a-kube-api-access-854hb\") pod \"c6f1054c-67cd-46bb-b781-62716d75231a\" (UID: \"c6f1054c-67cd-46bb-b781-62716d75231a\") " Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.822211 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-utilities" (OuterVolumeSpecName: "utilities") pod "c6f1054c-67cd-46bb-b781-62716d75231a" (UID: "c6f1054c-67cd-46bb-b781-62716d75231a"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.825313 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c6f1054c-67cd-46bb-b781-62716d75231a-kube-api-access-854hb" (OuterVolumeSpecName: "kube-api-access-854hb") pod "c6f1054c-67cd-46bb-b781-62716d75231a" (UID: "c6f1054c-67cd-46bb-b781-62716d75231a"). InnerVolumeSpecName "kube-api-access-854hb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.854887 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c6f1054c-67cd-46bb-b781-62716d75231a" (UID: "c6f1054c-67cd-46bb-b781-62716d75231a"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.922535 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-854hb\" (UniqueName: \"kubernetes.io/projected/c6f1054c-67cd-46bb-b781-62716d75231a-kube-api-access-854hb\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.922781 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.922796 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c6f1054c-67cd-46bb-b781-62716d75231a-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.929023 4703 scope.go:117] "RemoveContainer" containerID="715292648aa3faed93659ce0d29b5a61ff52bfd76908000aa01f5f0fc1603981" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.951111 4703 scope.go:117] "RemoveContainer" containerID="a596dbd293b0cfa248382ad3a8344281598b01f6da1e5ddd14844f54a3eda9b7" Feb 02 12:55:11 crc kubenswrapper[4703]: I0202 12:55:11.970345 4703 scope.go:117] "RemoveContainer" containerID="e3c8267aa3187ecdd304b8aa7bc02c78b43ace432b331064806e8286e7c747ed" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.501119 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" event={"ID":"f85e55b1a89d02b0cb034b1ea31ed45a","Type":"ContainerStarted","Data":"e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019"} Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.502135 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.502513 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.502875 4703 status_manager.go:851] 
"Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.503191 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.506354 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-wn5cj" event={"ID":"c6f1054c-67cd-46bb-b781-62716d75231a","Type":"ContainerDied","Data":"22d9322fd663fc4618c399222addb1c6ad30e3644f0047778c267dd8bc2e86b2"} Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.506394 4703 scope.go:117] "RemoveContainer" containerID="cf46d9c3973c96634ec200117964f20c05825d91f91007940b87b47d8b338a80" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.506417 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-wn5cj" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.507126 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.507446 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.508232 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.509074 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.510486 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.510615 4703 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.510765 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.511316 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.511707 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.522377 4703 scope.go:117] "RemoveContainer" containerID="8e1a5d5d4f5a724821134712f71e25f210d6f87c07fc0501162aea0707a35798" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.538709 4703 scope.go:117] "RemoveContainer" containerID="434ffea7da1ba510463a499d39e93f97217a52714ef42b1581895b3830126f44" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.785546 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.786302 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.786720 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.786943 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.787145 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834388 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-var-lock\") pod \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834503 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kubelet-dir\") pod \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834588 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kube-api-access\") pod \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\" (UID: \"bd22e13b-0430-4cb9-8c95-ffbce79855cf\") " Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834490 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-var-lock" (OuterVolumeSpecName: "var-lock") pod "bd22e13b-0430-4cb9-8c95-ffbce79855cf" (UID: "bd22e13b-0430-4cb9-8c95-ffbce79855cf"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834575 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kubelet-dir" (OuterVolumeSpecName: "kubelet-dir") pod "bd22e13b-0430-4cb9-8c95-ffbce79855cf" (UID: "bd22e13b-0430-4cb9-8c95-ffbce79855cf"). InnerVolumeSpecName "kubelet-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834797 4703 reconciler_common.go:293] "Volume detached for volume \"kubelet-dir\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kubelet-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.834808 4703 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/bd22e13b-0430-4cb9-8c95-ffbce79855cf-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.838846 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kube-api-access" (OuterVolumeSpecName: "kube-api-access") pod "bd22e13b-0430-4cb9-8c95-ffbce79855cf" (UID: "bd22e13b-0430-4cb9-8c95-ffbce79855cf"). InnerVolumeSpecName "kube-api-access". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:55:12 crc kubenswrapper[4703]: I0202 12:55:12.936097 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access\" (UniqueName: \"kubernetes.io/projected/bd22e13b-0430-4cb9-8c95-ffbce79855cf-kube-api-access\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.230607 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.231710 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.232617 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.233138 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.233466 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.233707 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.234021 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.341964 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342034 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342100 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") pod \"f4b27818a5e8e43d0dc095d08835c792\" (UID: \"f4b27818a5e8e43d0dc095d08835c792\") " Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342160 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir" (OuterVolumeSpecName: "audit-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "audit-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342189 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342258 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir" (OuterVolumeSpecName: "cert-dir") pod "f4b27818a5e8e43d0dc095d08835c792" (UID: "f4b27818a5e8e43d0dc095d08835c792"). InnerVolumeSpecName "cert-dir". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342645 4703 reconciler_common.go:293] "Volume detached for volume \"audit-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-audit-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342672 4703 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.342693 4703 reconciler_common.go:293] "Volume detached for volume \"cert-dir\" (UniqueName: \"kubernetes.io/host-path/f4b27818a5e8e43d0dc095d08835c792-cert-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.519716 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/installer-9-crc" event={"ID":"bd22e13b-0430-4cb9-8c95-ffbce79855cf","Type":"ContainerDied","Data":"24162ff10f6a2a8e3809c41c97e2adcd7f38c9dc48f819450568e9fbff8df111"} Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.519775 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/installer-9-crc" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.519786 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="24162ff10f6a2a8e3809c41c97e2adcd7f38c9dc48f819450568e9fbff8df111" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.524741 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-crc_f4b27818a5e8e43d0dc095d08835c792/kube-apiserver-cert-syncer/0.log" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.525696 4703 generic.go:334] "Generic (PLEG): container finished" podID="f4b27818a5e8e43d0dc095d08835c792" containerID="aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b" exitCode=0 Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.525788 4703 scope.go:117] "RemoveContainer" containerID="cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.525861 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.543657 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.544117 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.544297 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.544470 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.544629 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.544919 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.545089 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.545288 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.545454 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.545616 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.552970 4703 scope.go:117] "RemoveContainer" containerID="6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.570297 4703 scope.go:117] "RemoveContainer" containerID="5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.588257 4703 scope.go:117] "RemoveContainer" containerID="c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.603933 4703 scope.go:117] "RemoveContainer" containerID="aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.619659 4703 scope.go:117] "RemoveContainer" containerID="9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.639315 4703 scope.go:117] "RemoveContainer" containerID="cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54" Feb 02 12:55:13 crc kubenswrapper[4703]: E0202 12:55:13.640478 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\": container with ID starting with cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54 not found: ID does not exist" containerID="cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.640513 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54"} err="failed to get container status \"cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\": rpc error: code = NotFound desc = could not find container \"cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54\": container with ID starting with cd5311f84f86459b879154426b466b15b1ef6ee465771e96bffd7f93085dbb54 not found: ID does not exist" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.640543 4703 scope.go:117] "RemoveContainer" containerID="6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495" Feb 02 12:55:13 crc kubenswrapper[4703]: E0202 12:55:13.640959 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\": container with ID starting with 6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495 not found: ID does not exist" containerID="6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.640986 4703 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495"} err="failed to get container status \"6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\": rpc error: code = NotFound desc = could not find container \"6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495\": container with ID starting with 6299ca31e1e9ec460447051e9d16b27fe90688492e8c0816e6e4666c32b51495 not found: ID does not exist" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.641005 4703 scope.go:117] "RemoveContainer" containerID="5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d" Feb 02 12:55:13 crc kubenswrapper[4703]: E0202 12:55:13.641319 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\": container with ID starting with 5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d not found: ID does not exist" containerID="5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.641342 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d"} err="failed to get container status \"5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\": rpc error: code = NotFound desc = could not find container \"5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d\": container with ID starting with 5e8313fe4e3c637d3d693d3e295d22e1d25eab458e0a774c6f32e85b7e6fd05d not found: ID does not exist" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.641359 4703 scope.go:117] "RemoveContainer" containerID="c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c" Feb 02 12:55:13 crc kubenswrapper[4703]: E0202 12:55:13.643053 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\": container with ID starting with c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c not found: ID does not exist" containerID="c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.643081 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c"} err="failed to get container status \"c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\": rpc error: code = NotFound desc = could not find container \"c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c\": container with ID starting with c96c72779074ba7434f44f3aa7fa7bb1a81d2776bc988acddf11f07a8e863d2c not found: ID does not exist" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.643098 4703 scope.go:117] "RemoveContainer" containerID="aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b" Feb 02 12:55:13 crc kubenswrapper[4703]: E0202 12:55:13.643437 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\": container with ID starting with aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b not found: ID does not exist" 
containerID="aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.643461 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b"} err="failed to get container status \"aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\": rpc error: code = NotFound desc = could not find container \"aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b\": container with ID starting with aee59ce7f9966fae4d2573387d7591e2d2b4074ebfe86a5718afba5697d4e93b not found: ID does not exist" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.643481 4703 scope.go:117] "RemoveContainer" containerID="9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612" Feb 02 12:55:13 crc kubenswrapper[4703]: E0202 12:55:13.643788 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\": container with ID starting with 9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612 not found: ID does not exist" containerID="9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.643822 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612"} err="failed to get container status \"9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\": rpc error: code = NotFound desc = could not find container \"9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612\": container with ID starting with 9b39207fb67458f69c93cfa49c36307e9edb5a02268e41ac27919a3430aef612 not found: ID does not exist" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.937124 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.938181 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.939323 4703 status_manager.go:851] "Failed to get status for pod" podUID="f4b27818a5e8e43d0dc095d08835c792" pod="openshift-kube-apiserver/kube-apiserver-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.939551 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.940179 4703 
status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:13 crc kubenswrapper[4703]: I0202 12:55:13.942809 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f4b27818a5e8e43d0dc095d08835c792" path="/var/lib/kubelet/pods/f4b27818a5e8e43d0dc095d08835c792/volumes" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.233434 4703 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18906f31511028e8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 12:55:11.160699112 +0000 UTC m=+238.175906636,LastTimestamp:2026-02-02 12:55:11.160699112 +0000 UTC m=+238.175906636,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.835493 4703 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.835849 4703 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.836262 4703 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.836887 4703 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.837351 4703 controller.go:195] "Failed to update lease" err="Put \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:14 crc kubenswrapper[4703]: I0202 12:55:14.837418 4703 controller.go:115] "failed to update lease using latest lease, fallback to ensure lease" err="failed 5 attempts to update lease" Feb 02 12:55:14 crc kubenswrapper[4703]: E0202 12:55:14.837770 4703 
controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="200ms" Feb 02 12:55:15 crc kubenswrapper[4703]: E0202 12:55:15.039118 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="400ms" Feb 02 12:55:15 crc kubenswrapper[4703]: E0202 12:55:15.439964 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="800ms" Feb 02 12:55:16 crc kubenswrapper[4703]: E0202 12:55:16.240574 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="1.6s" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.541322 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.547426 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.548006 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.548660 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.549121 4703 status_manager.go:851] "Failed to get status for pod" podUID="cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-7979b4bd5f-8vzls\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.549492 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:17 crc kubenswrapper[4703]: I0202 12:55:17.549840 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get 
\"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:17 crc kubenswrapper[4703]: E0202 12:55:17.841959 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="3.2s" Feb 02 12:55:20 crc kubenswrapper[4703]: E0202 12:55:20.000943 4703 desired_state_of_world_populator.go:312] "Error processing volume" err="error processing PVC openshift-image-registry/crc-image-registry-storage: failed to fetch PVC from API server: Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-image-registry/persistentvolumeclaims/crc-image-registry-storage\": dial tcp 38.102.83.198:6443: connect: connection refused" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" volumeName="registry-storage" Feb 02 12:55:21 crc kubenswrapper[4703]: E0202 12:55:21.043900 4703 controller.go:145] "Failed to ensure lease exists, will retry" err="Get \"https://api-int.crc.testing:6443/apis/coordination.k8s.io/v1/namespaces/kube-node-lease/leases/crc?timeout=10s\": dial tcp 38.102.83.198:6443: connect: connection refused" interval="6.4s" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.594381 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.594675 4703 generic.go:334] "Generic (PLEG): container finished" podID="f614b9022728cf315e60c057852e563e" containerID="238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604" exitCode=1 Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.594707 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerDied","Data":"238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604"} Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.595237 4703 scope.go:117] "RemoveContainer" containerID="238a75515f6a89d31e2cc9136de530b182568822f0dce4592c1d34d3aede9604" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.595565 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.595914 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.596152 4703 status_manager.go:851] "Failed to get status for pod" podUID="cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-7979b4bd5f-8vzls\": dial tcp 
38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.596466 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.596721 4703 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.597062 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.933402 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.936996 4703 status_manager.go:851] "Failed to get status for pod" podUID="cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-7979b4bd5f-8vzls\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.937913 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.938513 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.938825 4703 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.939188 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc 
kubenswrapper[4703]: I0202 12:55:23.939743 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.940510 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.940746 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.940936 4703 status_manager.go:851] "Failed to get status for pod" podUID="cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-7979b4bd5f-8vzls\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.941108 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.941365 4703 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.941701 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.950571 4703 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.950608 4703 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b" Feb 02 12:55:23 crc kubenswrapper[4703]: E0202 12:55:23.950934 4703 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" 
pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:23 crc kubenswrapper[4703]: I0202 12:55:23.951456 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:23 crc kubenswrapper[4703]: W0202 12:55:23.968565 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod71bb4a3aecc4ba5b26c4b7318770ce13.slice/crio-4e4d40d80bacb6a4a6709f4b89f8567efcaf1f7247c0355fade1aba288695a7e WatchSource:0}: Error finding container 4e4d40d80bacb6a4a6709f4b89f8567efcaf1f7247c0355fade1aba288695a7e: Status 404 returned error can't find the container with id 4e4d40d80bacb6a4a6709f4b89f8567efcaf1f7247c0355fade1aba288695a7e Feb 02 12:55:24 crc kubenswrapper[4703]: E0202 12:55:24.234699 4703 event.go:368] "Unable to write event (may retry after sleeping)" err="Post \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/events\": dial tcp 38.102.83.198:6443: connect: connection refused" event="&Event{ObjectMeta:{kube-apiserver-startup-monitor-crc.18906f31511028e8 openshift-kube-apiserver 0 0001-01-01 00:00:00 +0000 UTC map[] map[] [] [] []},InvolvedObject:ObjectReference{Kind:Pod,Namespace:openshift-kube-apiserver,Name:kube-apiserver-startup-monitor-crc,UID:f85e55b1a89d02b0cb034b1ea31ed45a,APIVersion:v1,ResourceVersion:,FieldPath:spec.containers{startup-monitor},},Reason:Pulled,Message:Container image \"quay.io/crcont/openshift-crc-cluster-kube-apiserver-operator@sha256:9f36dc276e27753fc478274c7f7814a4f8945c987117ee1ea3b8e6355e6d7462\" already present on machine,Source:EventSource{Component:kubelet,Host:crc,},FirstTimestamp:2026-02-02 12:55:11.160699112 +0000 UTC m=+238.175906636,LastTimestamp:2026-02-02 12:55:11.160699112 +0000 UTC m=+238.175906636,Count:1,Type:Normal,EventTime:0001-01-01 00:00:00 +0000 UTC,Series:nil,Action:,Related:nil,ReportingController:kubelet,ReportingInstance:crc,}" Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.609161 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-controller-manager_kube-controller-manager-crc_f614b9022728cf315e60c057852e563e/kube-controller-manager/0.log" Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.609358 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-controller-manager/kube-controller-manager-crc" event={"ID":"f614b9022728cf315e60c057852e563e","Type":"ContainerStarted","Data":"8d1599ed30c2ee1b351257a4d37985a484451c9118e1c5ec218db41e6fe96bc7"} Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.610917 4703 status_manager.go:851] "Failed to get status for pod" podUID="cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-7979b4bd5f-8vzls\": dial tcp 38.102.83.198:6443: connect: connection refused" Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611000 4703 generic.go:334] "Generic (PLEG): container finished" podID="71bb4a3aecc4ba5b26c4b7318770ce13" containerID="dbf6091231781076ce378298c139dab36555c26ba94f6753aade8a4ce3885a79" exitCode=0 Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611031 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerDied","Data":"dbf6091231781076ce378298c139dab36555c26ba94f6753aade8a4ce3885a79"} 
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611046 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"4e4d40d80bacb6a4a6709f4b89f8567efcaf1f7247c0355fade1aba288695a7e"}
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611247 4703 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611292 4703 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611358 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: E0202 12:55:24.611710 4703 mirror_client.go:138] "Failed deleting a mirror pod" err="Delete \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-crc\": dial tcp 38.102.83.198:6443: connect: connection refused" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.611777 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.612346 4703 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.612762 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.613177 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.613669 4703 status_manager.go:851] "Failed to get status for pod" podUID="cea5c031-f5fc-4cb7-8b95-26fc7eb7c9b1" pod="openshift-authentication/oauth-openshift-7979b4bd5f-8vzls" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-authentication/pods/oauth-openshift-7979b4bd5f-8vzls\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.614042 4703 status_manager.go:851] "Failed to get status for pod" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" pod="openshift-marketplace/certified-operators-4qklh" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/certified-operators-4qklh\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.614482 4703 status_manager.go:851] "Failed to get status for pod" podUID="f614b9022728cf315e60c057852e563e" pod="openshift-kube-controller-manager/kube-controller-manager-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-controller-manager/pods/kube-controller-manager-crc\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.614850 4703 status_manager.go:851] "Failed to get status for pod" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/kube-apiserver-startup-monitor-crc\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.615431 4703 status_manager.go:851] "Failed to get status for pod" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" pod="openshift-kube-apiserver/installer-9-crc" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-kube-apiserver/pods/installer-9-crc\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:24 crc kubenswrapper[4703]: I0202 12:55:24.616032 4703 status_manager.go:851] "Failed to get status for pod" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" pod="openshift-marketplace/redhat-marketplace-wn5cj" err="Get \"https://api-int.crc.testing:6443/api/v1/namespaces/openshift-marketplace/pods/redhat-marketplace-wn5cj\": dial tcp 38.102.83.198:6443: connect: connection refused"
Feb 02 12:55:25 crc kubenswrapper[4703]: I0202 12:55:25.620464 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"232cfe0f502ca81de6c123f75e93698e6cacd5df4df2a198300e2748a7122291"}
Feb 02 12:55:25 crc kubenswrapper[4703]: I0202 12:55:25.620880 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"939bf09aa23cd4eb07a1ef3353e5257cad22e195d1bc40b6f21bb49c126247bb"}
Feb 02 12:55:25 crc kubenswrapper[4703]: I0202 12:55:25.620892 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"c4beebef09d438b0912aaf740c4348f0e4ceceecbfce6ebdd5f88b3fa9aa2610"}
Feb 02 12:55:25 crc kubenswrapper[4703]: I0202 12:55:25.620902 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"9cac0d34e3307ae135e40cedbef3577cbdb1433737ebdf4e068b2d4505a95c2a"}
Feb 02 12:55:26 crc kubenswrapper[4703]: I0202 12:55:26.627916 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-kube-apiserver/kube-apiserver-crc" event={"ID":"71bb4a3aecc4ba5b26c4b7318770ce13","Type":"ContainerStarted","Data":"a1443ce68645681d4018a71cfcc3151f35f9909c3acb1e1d51a4b46c4df7571e"}
Feb 02 12:55:26 crc kubenswrapper[4703]: I0202 12:55:26.628125 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:26 crc kubenswrapper[4703]: I0202 12:55:26.628254 4703 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:26 crc kubenswrapper[4703]: I0202 12:55:26.628306 4703 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:27 crc kubenswrapper[4703]: I0202 12:55:27.647871 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 12:55:27 crc kubenswrapper[4703]: I0202 12:55:27.652347 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 12:55:28 crc kubenswrapper[4703]: I0202 12:55:28.636874 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
Feb 02 12:55:28 crc kubenswrapper[4703]: I0202 12:55:28.952836 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:28 crc kubenswrapper[4703]: I0202 12:55:28.952885 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:28 crc kubenswrapper[4703]: I0202 12:55:28.959366 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:31 crc kubenswrapper[4703]: I0202 12:55:31.637969 4703 kubelet.go:1914] "Deleted mirror pod because it is outdated" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:32 crc kubenswrapper[4703]: I0202 12:55:32.655293 4703 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:32 crc kubenswrapper[4703]: I0202 12:55:32.655331 4703 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:32 crc kubenswrapper[4703]: I0202 12:55:32.661162 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc"
Feb 02 12:55:33 crc kubenswrapper[4703]: I0202 12:55:33.659535 4703 kubelet.go:1909] "Trying to delete pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:33 crc kubenswrapper[4703]: I0202 12:55:33.659565 4703 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-crc" podUID="7fa7c43a-a618-412d-8e3a-aee8148de65b"
Feb 02 12:55:33 crc kubenswrapper[4703]: I0202 12:55:33.948021 4703 status_manager.go:861] "Pod was deleted and then recreated, skipping status update" pod="openshift-kube-apiserver/kube-apiserver-crc" oldPodUID="71bb4a3aecc4ba5b26c4b7318770ce13" podUID="b30c75cd-5688-49f1-89d4-8932e539b882"
Feb 02 12:55:40 crc kubenswrapper[4703]: I0202 12:55:40.260907 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-controller-manager/kube-controller-manager-crc"
pod="openshift-kube-controller-manager/kube-controller-manager-crc" Feb 02 12:55:42 crc kubenswrapper[4703]: I0202 12:55:42.831845 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"serving-cert" Feb 02 12:55:42 crc kubenswrapper[4703]: I0202 12:55:42.914226 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-node-dockercfg-pwtwl" Feb 02 12:55:43 crc kubenswrapper[4703]: I0202 12:55:43.022101 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"machine-config-operator-images" Feb 02 12:55:43 crc kubenswrapper[4703]: I0202 12:55:43.214319 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"openshift-service-ca.crt" Feb 02 12:55:43 crc kubenswrapper[4703]: I0202 12:55:43.228928 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"openshift-service-ca.crt" Feb 02 12:55:43 crc kubenswrapper[4703]: I0202 12:55:43.264825 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 12:55:43 crc kubenswrapper[4703]: I0202 12:55:43.546302 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"service-ca-bundle" Feb 02 12:55:43 crc kubenswrapper[4703]: I0202 12:55:43.651381 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.043991 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-serving-cert" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.102762 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"packageserver-service-cert" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.723252 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"openshift-service-ca.crt" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.740921 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"openshift-service-ca.crt" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.765623 4703 reflector.go:368] Caches populated for *v1.Node from k8s.io/client-go/informers/factory.go:160 Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.808433 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"node-ca-dockercfg-4777p" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.839145 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"etcd-serving-ca" Feb 02 12:55:44 crc kubenswrapper[4703]: I0202 12:55:44.944435 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"kube-root-ca.crt" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.052541 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.184069 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"ovnkube-identity-cm" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.208042 4703 reflector.go:368] 
Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"oauth-apiserver-sa-dockercfg-6r2bq" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.259214 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"image-registry-certificates" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.346252 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"openshift-service-ca.crt" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.524803 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-login" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.638867 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"openshift-service-ca.crt" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.734337 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.773494 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-trusted-ca-bundle" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.774227 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"console-operator-config" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.863113 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"kube-storage-version-migrator-operator-dockercfg-2bh8d" Feb 02 12:55:45 crc kubenswrapper[4703]: I0202 12:55:45.914478 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"etcd-client" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.119844 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-serving-cert" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.256933 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"installation-pull-secrets" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.338065 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-root-ca.crt" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.469685 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"openshift-service-ca.crt" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.489107 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-ca-bundle" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.489706 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"service-ca-operator-config" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.495551 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-controller-dockercfg-c2lfx" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.555266 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-tls" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.573026 4703 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-dns"/"dns-default-metrics-tls" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.580039 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-stats-default" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.713044 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"node-resolver-dockercfg-kz9s7" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.770968 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-service-ca.crt" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.823619 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-dockercfg-qt55r" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.844006 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-root-ca.crt" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.887893 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mco-proxy-tls" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.917485 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"console-config" Feb 02 12:55:46 crc kubenswrapper[4703]: I0202 12:55:46.962319 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"proxy-tls" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.071934 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.123315 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.163454 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"openshift-service-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.215400 4703 reflector.go:368] Caches populated for *v1.Pod from pkg/kubelet/config/apiserver.go:66 Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.220112 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podStartSLOduration=37.220091642 podStartE2EDuration="37.220091642s" podCreationTimestamp="2026-02-02 12:55:10 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:55:31.736381669 +0000 UTC m=+258.751589193" watchObservedRunningTime="2026-02-02 12:55:47.220091642 +0000 UTC m=+274.235299176" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.221024 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc","openshift-marketplace/redhat-marketplace-wn5cj","openshift-marketplace/certified-operators-4qklh"] Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.221090 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-kube-apiserver/kube-apiserver-crc"] Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.223025 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"serving-cert" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.225321 4703 kubelet.go:2542] 
"SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-kube-apiserver/kube-apiserver-crc" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.241859 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-kube-apiserver/kube-apiserver-crc" podStartSLOduration=16.241839981 podStartE2EDuration="16.241839981s" podCreationTimestamp="2026-02-02 12:55:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:55:47.237653982 +0000 UTC m=+274.252861516" watchObservedRunningTime="2026-02-02 12:55:47.241839981 +0000 UTC m=+274.257047515" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.301614 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-cliconfig" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.324933 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.397697 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.401801 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"openshift-apiserver-sa-dockercfg-djjff" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.414307 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-tls" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.429268 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"audit" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.440893 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"config" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.445739 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.480432 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"catalog-operator-serving-cert" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.504426 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"encryption-config-1" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.505445 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-operator-dockercfg-98p87" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.516304 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.641595 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"openshift-service-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.677137 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns"/"dns-dockercfg-jwfmh" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.756330 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.775690 4703 reflector.go:368] Caches 
populated for *v1.ConfigMap from object-"openshift-console"/"oauth-serving-cert" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.833014 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.833684 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-canary"/"kube-root-ca.crt" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.941246 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" path="/var/lib/kubelet/pods/15647656-be1e-49d7-92dd-880ca1fd4d31/volumes" Feb 02 12:55:47 crc kubenswrapper[4703]: I0202 12:55:47.942077 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" path="/var/lib/kubelet/pods/c6f1054c-67cd-46bb-b781-62716d75231a/volumes" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.003453 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"oauth-openshift-dockercfg-znhcc" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.071130 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-daemon-dockercfg-r5tcq" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.150152 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-tls" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.243868 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serving-cert" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.270587 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"openshift-service-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.309194 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-oauth-apiserver"/"serving-cert" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.315439 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"kube-root-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.320599 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.434594 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"openshift-service-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.446585 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"trusted-ca-bundle" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.457981 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"metrics-tls" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.498826 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"openshift-config-operator-dockercfg-7pc5z" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.500565 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"openshift-service-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.517747 4703 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-cluster-samples-operator"/"kube-root-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.535502 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"openshift-service-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.576314 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-dockercfg-f62pw" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.625566 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress"/"kube-root-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.628166 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"service-ca-operator-dockercfg-rg9jl" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.649531 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-config-operator"/"kube-rbac-proxy" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.711949 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"openshift-service-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.744600 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-dockercfg-vw8fw" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.771527 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-sa-dockercfg-d427c" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.861242 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"default-dockercfg-gxtc4" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.893499 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"kube-root-ca.crt" Feb 02 12:55:48 crc kubenswrapper[4703]: I0202 12:55:48.937625 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-root-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.001468 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"etcd-serving-ca" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.033880 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-config" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.217385 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"service-ca-dockercfg-pn86c" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.241873 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"kube-root-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.407565 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-router-certs" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.454942 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"kube-root-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.468954 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"kube-root-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.470377 4703 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-service-ca"/"signing-cabundle" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.475936 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-service-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.531330 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-root-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.579670 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"trusted-ca-bundle" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.637204 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"trusted-ca" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.637503 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"openshift-service-ca.crt" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.644529 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"olm-operator-serviceaccount-dockercfg-rq7zk" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.709978 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca"/"signing-key" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.710150 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-dockercfg-r9srn" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.714311 4703 reflector.go:368] Caches populated for *v1.Secret from object-"hostpath-provisioner"/"csi-hostpath-provisioner-sa-dockercfg-qd74k" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.739698 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-dockercfg-gkqpw" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.819224 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-error" Feb 02 12:55:49 crc kubenswrapper[4703]: I0202 12:55:49.854425 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.106309 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-session" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.114335 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-serving-cert" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.180197 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-diagnostics"/"openshift-service-ca.crt" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.202107 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-operator-config" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.284508 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"dns-operator-dockercfg-9mqw5" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.296347 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication-operator"/"authentication-operator-dockercfg-mz9bj" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 
12:55:50.309551 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"openshift-kube-scheduler-operator-config" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.401950 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"metrics-daemon-secret" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.458133 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.476091 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"env-overrides" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.488018 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ancillary-tools-dockercfg-vnmsz" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.497711 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"marketplace-trusted-ca" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.526783 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-serving-cert" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.562190 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-dns-operator"/"metrics-tls" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.613323 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-tls" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.639971 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"iptables-alerter-script" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.656414 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"service-ca-bundle" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.678614 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-oauth-config" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.692676 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.718524 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"cluster-image-registry-operator-dockercfg-m4qtx" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.883567 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"registry-dockercfg-kzzsd" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.933744 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"kube-root-ca.crt" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.937084 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-admission-controller-secret" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.947973 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"openshift-service-ca.crt" Feb 02 12:55:50 crc kubenswrapper[4703]: I0202 12:55:50.949604 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication-operator"/"authentication-operator-config" Feb 02 12:55:51 crc kubenswrapper[4703]: 
I0202 12:55:51.106027 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"kube-root-ca.crt" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.188601 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-marketplace"/"kube-root-ca.crt" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.349814 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-operator-serving-cert" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.450881 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"machine-api-operator-dockercfg-mfbb7" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.537823 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"openshift-service-ca.crt" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.544027 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-kubernetes-control-plane-dockercfg-gs7dd" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.607653 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-dockercfg-x57mr" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.614447 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"openshift-service-ca.crt" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.707293 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"mcc-proxy-tls" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.712639 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-serving-cert" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.735168 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"service-ca" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.740222 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"env-overrides" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.741348 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"openshift-service-ca.crt" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.784612 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-node-metrics-cert" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.818044 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-metrics" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.818848 4703 reflector.go:368] Caches populated for *v1.RuntimeClass from k8s.io/client-go/informers/factory.go:160 Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.915602 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-operator"/"metrics-tls" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.920199 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console"/"kube-root-ca.crt" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.989612 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-console"/"networking-console-plugin" Feb 02 12:55:51 crc kubenswrapper[4703]: I0202 12:55:51.993536 4703 reflector.go:368] Caches populated for 
*v1.Secret from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-dockercfg-xtcjv" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.003333 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"multus-ac-dockercfg-9lkdf" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.027945 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-samples-operator"/"openshift-service-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.059612 4703 reflector.go:368] Caches populated for *v1.Service from k8s.io/client-go/informers/factory.go:160 Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.077208 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"default-dockercfg-chnjx" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.078021 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"openshift-service-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.091622 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-dockercfg-zdk86" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.103722 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"kube-root-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.126137 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"openshift-service-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.238835 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"node-bootstrapper-token" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.278931 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"kube-rbac-proxy" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.328551 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"kube-root-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.332918 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-api"/"control-plane-machine-set-operator-dockercfg-k9rxt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.377266 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-storage-version-migrator-operator"/"serving-cert" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.592664 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca"/"kube-root-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.671203 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns"/"dns-default" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.671738 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"audit-1" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.727986 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.773806 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-machine-config-operator"/"machine-config-server-dockercfg-qx5rd" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.776722 4703 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-cluster-samples-operator"/"cluster-samples-operator-dockercfg-xpp9w" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.783966 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"hostpath-provisioner"/"kube-root-ca.crt" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.791562 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-metrics-certs-default" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.805617 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.858694 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 12:55:52 crc kubenswrapper[4703]: I0202 12:55:52.970914 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"openshift-service-ca.crt" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.081818 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.221471 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"etcd-client" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.272652 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-kube-scheduler-operator"/"kube-scheduler-operator-serving-cert" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.276642 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"encryption-config-1" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.287193 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"machine-approver-config" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.363316 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"kube-root-ca.crt" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.371202 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.387698 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"cni-copy-resources" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.464660 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-console-operator"/"kube-root-ca.crt" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.521636 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"marketplace-operator-dockercfg-5nsgg" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.528779 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"etcd-service-ca-bundle" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.568523 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"default-dockercfg-2llfx" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.696615 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-console"/"networking-console-plugin-cert" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.864933 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"audit-1" Feb 02 12:55:53 
crc kubenswrapper[4703]: I0202 12:55:53.901510 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"default-cni-sysctl-allowlist" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.970431 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-oauth-apiserver"/"trusted-ca-bundle" Feb 02 12:55:53 crc kubenswrapper[4703]: I0202 12:55:53.976125 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-machine-approver"/"kube-rbac-proxy" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.003368 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-multus"/"multus-daemon-config" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.011436 4703 kubelet.go:2431] "SyncLoop REMOVE" source="file" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.011787 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" containerID="cri-o://e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019" gracePeriod=5 Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.034780 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"config" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.175842 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-template-provider-selection" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.178107 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator"/"kube-root-ca.crt" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.179309 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"openshift-service-ca.crt" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.217702 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.223405 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-multus"/"default-dockercfg-2q5b6" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.316105 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver"/"trusted-ca-bundle" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.316449 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-apiserver-operator"/"openshift-apiserver-operator-config" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.337544 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-operator"/"ingress-operator-dockercfg-7lnqk" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.480304 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-config-operator"/"openshift-service-ca.crt" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.482074 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-authentication"/"v4-0-config-system-service-ca" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.558057 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-machine-api"/"machine-api-operator-images" 
Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.613940 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-apiserver-operator"/"kube-apiserver-operator-config" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.624223 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-operator-tls" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.649451 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ovn-kubernetes"/"ovn-control-plane-metrics-cert" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.651164 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-image-registry"/"image-registry-tls" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.655986 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-machine-approver"/"machine-approver-sa-dockercfg-nl2j4" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.680182 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"kube-root-ca.crt" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.777537 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"trusted-ca" Feb 02 12:55:54 crc kubenswrapper[4703]: I0202 12:55:54.971724 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-service-ca-operator"/"openshift-service-ca.crt" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.002557 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.089883 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-etcd-operator"/"etcd-client" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.569940 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress"/"router-certs-default" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.570328 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ingress-operator"/"openshift-service-ca.crt" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.576599 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-image-registry"/"trusted-ca" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.597200 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"pprof-cert" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.597213 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console-operator"/"console-operator-dockercfg-4xjcr" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.703125 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"openshift-service-ca.crt" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.803365 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-version"/"cluster-version-operator-serving-cert" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.893418 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-controller-manager-operator"/"kube-controller-manager-operator-config" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.981518 4703 reflector.go:368] Caches populated for *v1.ConfigMap from 
object-"openshift-apiserver"/"image-import-ca" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.981682 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"package-server-manager-serving-cert" Feb 02 12:55:55 crc kubenswrapper[4703]: I0202 12:55:55.994206 4703 reflector.go:368] Caches populated for *v1.CSIDriver from k8s.io/client-go/informers/factory.go:160 Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.042437 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-cluster-samples-operator"/"samples-operator-tls" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.125395 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-config-operator"/"config-operator-serving-cert" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.134416 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-service-ca-operator"/"serving-cert" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.149155 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-storage-version-migrator-operator"/"openshift-service-ca.crt" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.182174 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager-operator"/"openshift-controller-manager-operator-config" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.256555 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-ovn-kubernetes"/"ovnkube-script-lib" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.569616 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-console"/"console-serving-cert" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.713538 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-operator"/"kube-root-ca.crt" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.855922 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-network-node-identity"/"network-node-identity-cert" Feb 02 12:55:56 crc kubenswrapper[4703]: I0202 12:55:56.966481 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-ingress-canary"/"canary-serving-cert" Feb 02 12:55:57 crc kubenswrapper[4703]: I0202 12:55:57.320390 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-kube-scheduler-operator"/"kube-root-ca.crt" Feb 02 12:55:57 crc kubenswrapper[4703]: I0202 12:55:57.343113 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-system-ocp-branding-template" Feb 02 12:55:57 crc kubenswrapper[4703]: I0202 12:55:57.422541 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-apiserver"/"serving-cert" Feb 02 12:55:57 crc kubenswrapper[4703]: I0202 12:55:57.772549 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-authentication"/"v4-0-config-user-idp-0-file-data" Feb 02 12:55:58 crc kubenswrapper[4703]: I0202 12:55:58.431925 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-dns-operator"/"kube-root-ca.crt" Feb 02 12:55:58 crc kubenswrapper[4703]: I0202 12:55:58.963799 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-network-node-identity"/"kube-root-ca.crt" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.124845 4703 reflector.go:368] Caches populated for *v1.Secret from 
object-"openshift-kube-storage-version-migrator"/"kube-storage-version-migrator-sa-dockercfg-5xfcg" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.594645 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.594976 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.756817 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.756932 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.756955 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log" (OuterVolumeSpecName: "var-log") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.756969 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757009 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir" (OuterVolumeSpecName: "resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757065 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757142 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") pod \"f85e55b1a89d02b0cb034b1ea31ed45a\" (UID: \"f85e55b1a89d02b0cb034b1ea31ed45a\") " Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757161 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests" (OuterVolumeSpecName: "manifests") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "manifests". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757190 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock" (OuterVolumeSpecName: "var-lock") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "var-lock". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757388 4703 reconciler_common.go:293] "Volume detached for volume \"resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757402 4703 reconciler_common.go:293] "Volume detached for volume \"manifests\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-manifests\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757423 4703 reconciler_common.go:293] "Volume detached for volume \"var-lock\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-lock\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.757430 4703 reconciler_common.go:293] "Volume detached for volume \"var-log\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-var-log\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.764578 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir" (OuterVolumeSpecName: "pod-resource-dir") pod "f85e55b1a89d02b0cb034b1ea31ed45a" (UID: "f85e55b1a89d02b0cb034b1ea31ed45a"). InnerVolumeSpecName "pod-resource-dir". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.796231 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-cluster-version"/"kube-root-ca.crt" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.815950 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-kube-apiserver_kube-apiserver-startup-monitor-crc_f85e55b1a89d02b0cb034b1ea31ed45a/startup-monitor/0.log" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.816003 4703 generic.go:334] "Generic (PLEG): container finished" podID="f85e55b1a89d02b0cb034b1ea31ed45a" containerID="e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019" exitCode=137 Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.816048 4703 scope.go:117] "RemoveContainer" containerID="e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.816155 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.835435 4703 scope.go:117] "RemoveContainer" containerID="e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019" Feb 02 12:55:59 crc kubenswrapper[4703]: E0202 12:55:59.835944 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019\": container with ID starting with e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019 not found: ID does not exist" containerID="e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.835999 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019"} err="failed to get container status \"e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019\": rpc error: code = NotFound desc = could not find container \"e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019\": container with ID starting with e168df3a36b2d6ef07a6178c2b2fe24dcd094dd25be2e75e38133d89c67a9019 not found: ID does not exist" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.858186 4703 reconciler_common.go:293] "Volume detached for volume \"pod-resource-dir\" (UniqueName: \"kubernetes.io/host-path/f85e55b1a89d02b0cb034b1ea31ed45a-pod-resource-dir\") on node \"crc\" DevicePath \"\"" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.915395 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-etcd-operator"/"openshift-service-ca.crt" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.940997 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" path="/var/lib/kubelet/pods/f85e55b1a89d02b0cb034b1ea31ed45a/volumes" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.941292 4703 mirror_client.go:130] "Deleting a mirror pod" pod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" podUID="" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.954311 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.954385 4703 kubelet.go:2649] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="cbd7550f-d171-4784-a578-8fe056e335bb" Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.966941 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-kube-apiserver/kube-apiserver-startup-monitor-crc"] Feb 02 12:55:59 crc kubenswrapper[4703]: I0202 12:55:59.966997 4703 kubelet.go:2673] "Unable to find pod for mirror pod, skipping" mirrorPod="openshift-kube-apiserver/kube-apiserver-startup-monitor-crc" mirrorPodUID="cbd7550f-d171-4784-a578-8fe056e335bb" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.052493 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv"] Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.052844 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" podUID="6a2de52f-007a-4674-8d59-62a3dd0f52de" 
containerName="controller-manager" containerID="cri-o://8e21f8cf16a4628f53857f2a8afcde59e2d7bf5a3c9f65a0f5ef3f8e0da0bea2" gracePeriod=30 Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.157137 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77"] Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.157544 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" podUID="cf096749-b5ec-4fde-a26f-32372db74d5b" containerName="route-controller-manager" containerID="cri-o://454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362" gracePeriod=30 Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.581196 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.670380 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4hjf7\" (UniqueName: \"kubernetes.io/projected/cf096749-b5ec-4fde-a26f-32372db74d5b-kube-api-access-4hjf7\") pod \"cf096749-b5ec-4fde-a26f-32372db74d5b\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.671237 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf096749-b5ec-4fde-a26f-32372db74d5b-serving-cert\") pod \"cf096749-b5ec-4fde-a26f-32372db74d5b\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.671315 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-config\") pod \"cf096749-b5ec-4fde-a26f-32372db74d5b\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.671365 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-client-ca\") pod \"cf096749-b5ec-4fde-a26f-32372db74d5b\" (UID: \"cf096749-b5ec-4fde-a26f-32372db74d5b\") " Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.672028 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-client-ca" (OuterVolumeSpecName: "client-ca") pod "cf096749-b5ec-4fde-a26f-32372db74d5b" (UID: "cf096749-b5ec-4fde-a26f-32372db74d5b"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.672050 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-config" (OuterVolumeSpecName: "config") pod "cf096749-b5ec-4fde-a26f-32372db74d5b" (UID: "cf096749-b5ec-4fde-a26f-32372db74d5b"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.674575 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/cf096749-b5ec-4fde-a26f-32372db74d5b-kube-api-access-4hjf7" (OuterVolumeSpecName: "kube-api-access-4hjf7") pod "cf096749-b5ec-4fde-a26f-32372db74d5b" (UID: "cf096749-b5ec-4fde-a26f-32372db74d5b"). InnerVolumeSpecName "kube-api-access-4hjf7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.681990 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/cf096749-b5ec-4fde-a26f-32372db74d5b-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "cf096749-b5ec-4fde-a26f-32372db74d5b" (UID: "cf096749-b5ec-4fde-a26f-32372db74d5b"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.772431 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4hjf7\" (UniqueName: \"kubernetes.io/projected/cf096749-b5ec-4fde-a26f-32372db74d5b-kube-api-access-4hjf7\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.772491 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/cf096749-b5ec-4fde-a26f-32372db74d5b-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.772505 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.772514 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/cf096749-b5ec-4fde-a26f-32372db74d5b-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.823733 4703 generic.go:334] "Generic (PLEG): container finished" podID="cf096749-b5ec-4fde-a26f-32372db74d5b" containerID="454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362" exitCode=0 Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.823783 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.823783 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" event={"ID":"cf096749-b5ec-4fde-a26f-32372db74d5b","Type":"ContainerDied","Data":"454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362"} Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.823947 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77" event={"ID":"cf096749-b5ec-4fde-a26f-32372db74d5b","Type":"ContainerDied","Data":"75ab47849cd180a71c95d25fe9fc3f7ee12d016318f821cfc82e661028716952"} Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.823971 4703 scope.go:117] "RemoveContainer" containerID="454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.827143 4703 generic.go:334] "Generic (PLEG): container finished" podID="6a2de52f-007a-4674-8d59-62a3dd0f52de" containerID="8e21f8cf16a4628f53857f2a8afcde59e2d7bf5a3c9f65a0f5ef3f8e0da0bea2" exitCode=0 Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.827301 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" event={"ID":"6a2de52f-007a-4674-8d59-62a3dd0f52de","Type":"ContainerDied","Data":"8e21f8cf16a4628f53857f2a8afcde59e2d7bf5a3c9f65a0f5ef3f8e0da0bea2"} Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.843363 4703 scope.go:117] "RemoveContainer" containerID="454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362" Feb 02 12:56:00 crc kubenswrapper[4703]: E0202 12:56:00.844719 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362\": container with ID starting with 454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362 not found: ID does not exist" containerID="454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.844784 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362"} err="failed to get container status \"454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362\": rpc error: code = NotFound desc = could not find container \"454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362\": container with ID starting with 454f1873fa8d2d6c4b89d9772bc77f747a9605ff3c4a98190c14afe611ec3362 not found: ID does not exist" Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.860298 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77"] Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.864789 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-68884dbf87-gmb77"] Feb 02 12:56:00 crc kubenswrapper[4703]: I0202 12:56:00.907175 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.074736 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2de52f-007a-4674-8d59-62a3dd0f52de-serving-cert\") pod \"6a2de52f-007a-4674-8d59-62a3dd0f52de\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.074797 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-proxy-ca-bundles\") pod \"6a2de52f-007a-4674-8d59-62a3dd0f52de\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.074846 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-client-ca\") pod \"6a2de52f-007a-4674-8d59-62a3dd0f52de\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.074871 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-config\") pod \"6a2de52f-007a-4674-8d59-62a3dd0f52de\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.075002 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j8vcz\" (UniqueName: \"kubernetes.io/projected/6a2de52f-007a-4674-8d59-62a3dd0f52de-kube-api-access-j8vcz\") pod \"6a2de52f-007a-4674-8d59-62a3dd0f52de\" (UID: \"6a2de52f-007a-4674-8d59-62a3dd0f52de\") " Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.075683 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-client-ca" (OuterVolumeSpecName: "client-ca") pod "6a2de52f-007a-4674-8d59-62a3dd0f52de" (UID: "6a2de52f-007a-4674-8d59-62a3dd0f52de"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.075884 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-config" (OuterVolumeSpecName: "config") pod "6a2de52f-007a-4674-8d59-62a3dd0f52de" (UID: "6a2de52f-007a-4674-8d59-62a3dd0f52de"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.076042 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "6a2de52f-007a-4674-8d59-62a3dd0f52de" (UID: "6a2de52f-007a-4674-8d59-62a3dd0f52de"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.077823 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a2de52f-007a-4674-8d59-62a3dd0f52de-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "6a2de52f-007a-4674-8d59-62a3dd0f52de" (UID: "6a2de52f-007a-4674-8d59-62a3dd0f52de"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.078290 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a2de52f-007a-4674-8d59-62a3dd0f52de-kube-api-access-j8vcz" (OuterVolumeSpecName: "kube-api-access-j8vcz") pod "6a2de52f-007a-4674-8d59-62a3dd0f52de" (UID: "6a2de52f-007a-4674-8d59-62a3dd0f52de"). InnerVolumeSpecName "kube-api-access-j8vcz". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.177212 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.177259 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.177285 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/6a2de52f-007a-4674-8d59-62a3dd0f52de-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.177300 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j8vcz\" (UniqueName: \"kubernetes.io/projected/6a2de52f-007a-4674-8d59-62a3dd0f52de-kube-api-access-j8vcz\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.177316 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/6a2de52f-007a-4674-8d59-62a3dd0f52de-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265040 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-5rbfh"] Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265369 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="cf096749-b5ec-4fde-a26f-32372db74d5b" containerName="route-controller-manager" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265385 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="cf096749-b5ec-4fde-a26f-32372db74d5b" containerName="route-controller-manager" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265399 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="extract-utilities" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265406 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="extract-utilities" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265414 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265421 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265435 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="extract-content" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265441 4703 state_mem.go:107] "Deleted CPUSet assignment" 
podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="extract-content" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265448 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="registry-server" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265454 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="registry-server" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265466 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="extract-content" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265472 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="extract-content" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265485 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a2de52f-007a-4674-8d59-62a3dd0f52de" containerName="controller-manager" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265491 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a2de52f-007a-4674-8d59-62a3dd0f52de" containerName="controller-manager" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265500 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" containerName="installer" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265505 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" containerName="installer" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265515 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="registry-server" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265521 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="registry-server" Feb 02 12:56:01 crc kubenswrapper[4703]: E0202 12:56:01.265532 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="extract-utilities" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265538 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="15647656-be1e-49d7-92dd-880ca1fd4d31" containerName="extract-utilities" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265639 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="c6f1054c-67cd-46bb-b781-62716d75231a" containerName="registry-server" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265651 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="cf096749-b5ec-4fde-a26f-32372db74d5b" containerName="route-controller-manager" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265663 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f85e55b1a89d02b0cb034b1ea31ed45a" containerName="startup-monitor" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265676 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="bd22e13b-0430-4cb9-8c95-ffbce79855cf" containerName="installer" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265690 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a2de52f-007a-4674-8d59-62a3dd0f52de" containerName="controller-manager" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.265703 4703 
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.266307 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.272826 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"]
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.273645 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.275619 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.275886 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.275940 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.276212 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.276703 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.276820 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.277870 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-whtfh\" (UniqueName: \"kubernetes.io/projected/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-kube-api-access-whtfh\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.277978 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-serving-cert\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278104 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-client-ca\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278195 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-config\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278300 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-config\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278332 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-serving-cert\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278378 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-proxy-ca-bundles\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278450 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-client-ca\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278495 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ffk65\" (UniqueName: \"kubernetes.io/projected/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-kube-api-access-ffk65\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.278810 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-5rbfh"]
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.286487 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"]
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379011 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-whtfh\" (UniqueName: \"kubernetes.io/projected/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-kube-api-access-whtfh\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379071 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-serving-cert\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379105 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-client-ca\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379133 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-config\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379150 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-config\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379171 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-serving-cert\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379190 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-proxy-ca-bundles\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379208 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-client-ca\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.379233 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ffk65\" (UniqueName: \"kubernetes.io/projected/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-kube-api-access-ffk65\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.380470 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-client-ca\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.380833 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-proxy-ca-bundles\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.381032 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-config\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.381121 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-config\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.381766 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-client-ca\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.392203 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-serving-cert\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.392230 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-serving-cert\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.400253 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ffk65\" (UniqueName: \"kubernetes.io/projected/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-kube-api-access-ffk65\") pod \"controller-manager-656665f9d-5rbfh\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.404100 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-whtfh\" (UniqueName: \"kubernetes.io/projected/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-kube-api-access-whtfh\") pod \"route-controller-manager-74685fcd8b-7fk4b\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"
Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.594534 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh"
Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.603498 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.829946 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-5rbfh"] Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.835543 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" event={"ID":"6a2de52f-007a-4674-8d59-62a3dd0f52de","Type":"ContainerDied","Data":"15fa5088c432718a6f3adad550f8c4399c358ddc92f046de90a42f6d74d2adf8"} Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.835592 4703 scope.go:117] "RemoveContainer" containerID="8e21f8cf16a4628f53857f2a8afcde59e2d7bf5a3c9f65a0f5ef3f8e0da0bea2" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.835684 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.873067 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv"] Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.876603 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-765cfbfd4f-zmhhv"] Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.941154 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a2de52f-007a-4674-8d59-62a3dd0f52de" path="/var/lib/kubelet/pods/6a2de52f-007a-4674-8d59-62a3dd0f52de/volumes" Feb 02 12:56:01 crc kubenswrapper[4703]: I0202 12:56:01.941766 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="cf096749-b5ec-4fde-a26f-32372db74d5b" path="/var/lib/kubelet/pods/cf096749-b5ec-4fde-a26f-32372db74d5b/volumes" Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.089561 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"] Feb 02 12:56:02 crc kubenswrapper[4703]: W0202 12:56:02.091219 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5cf8e9aa_3a33_48e6_82f1_d0a341996ba7.slice/crio-e7378ce99967a418bb666ca81ae384f81880abf4357d2fdbb7d4334fd0d74d0e WatchSource:0}: Error finding container e7378ce99967a418bb666ca81ae384f81880abf4357d2fdbb7d4334fd0d74d0e: Status 404 returned error can't find the container with id e7378ce99967a418bb666ca81ae384f81880abf4357d2fdbb7d4334fd0d74d0e Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.842902 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" event={"ID":"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7","Type":"ContainerStarted","Data":"4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9"} Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.843333 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.843347 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" event={"ID":"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7","Type":"ContainerStarted","Data":"e7378ce99967a418bb666ca81ae384f81880abf4357d2fdbb7d4334fd0d74d0e"} Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.846079 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" event={"ID":"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf","Type":"ContainerStarted","Data":"427afabcb50a00835711988d856a500bf45c54392847d99fbffe52bd4f22f135"} Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.846139 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" event={"ID":"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf","Type":"ContainerStarted","Data":"4f3f482f16599b5847bc5b9d6f1ef3b9e12ad6016e52866e43c6b3bab1348830"} Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.846312 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.848484 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.854088 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.868820 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" podStartSLOduration=2.8687976280000003 podStartE2EDuration="2.868797628s" podCreationTimestamp="2026-02-02 12:56:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:56:02.865985858 +0000 UTC m=+289.881193392" watchObservedRunningTime="2026-02-02 12:56:02.868797628 +0000 UTC m=+289.884005162" Feb 02 12:56:02 crc kubenswrapper[4703]: I0202 12:56:02.883813 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" podStartSLOduration=2.883795125 podStartE2EDuration="2.883795125s" podCreationTimestamp="2026-02-02 12:56:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:56:02.881820389 +0000 UTC m=+289.897027923" watchObservedRunningTime="2026-02-02 12:56:02.883795125 +0000 UTC m=+289.899002659" Feb 02 12:56:13 crc kubenswrapper[4703]: I0202 12:56:13.687860 4703 cert_rotation.go:91] certificate rotation detected, shutting down client connections to start using new credentials Feb 02 12:56:13 crc kubenswrapper[4703]: I0202 12:56:13.904604 4703 generic.go:334] "Generic (PLEG): container finished" podID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerID="183546ba7d2be3961f39aec48156b93ad70e1f3aa707d1dce8f581fd308401c5" exitCode=0 Feb 02 12:56:13 crc kubenswrapper[4703]: I0202 12:56:13.904654 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" event={"ID":"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d","Type":"ContainerDied","Data":"183546ba7d2be3961f39aec48156b93ad70e1f3aa707d1dce8f581fd308401c5"} Feb 02 12:56:13 crc 
kubenswrapper[4703]: I0202 12:56:13.905189 4703 scope.go:117] "RemoveContainer" containerID="183546ba7d2be3961f39aec48156b93ad70e1f3aa707d1dce8f581fd308401c5" Feb 02 12:56:14 crc kubenswrapper[4703]: I0202 12:56:14.913015 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" event={"ID":"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d","Type":"ContainerStarted","Data":"2dc593a4acac2ad05dc60007f763c2c68600f7d2c341d50da41aecfaa0422a59"} Feb 02 12:56:14 crc kubenswrapper[4703]: I0202 12:56:14.913791 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:56:14 crc kubenswrapper[4703]: I0202 12:56:14.917307 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.041143 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-5rbfh"] Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.043918 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" podUID="0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" containerName="controller-manager" containerID="cri-o://427afabcb50a00835711988d856a500bf45c54392847d99fbffe52bd4f22f135" gracePeriod=30 Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.044854 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"] Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.045100 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" podUID="5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" containerName="route-controller-manager" containerID="cri-o://4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9" gracePeriod=30 Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.868004 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.945214 4703 generic.go:334] "Generic (PLEG): container finished" podID="5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" containerID="4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9" exitCode=0 Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.945317 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.945320 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" event={"ID":"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7","Type":"ContainerDied","Data":"4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9"} Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.945567 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b" event={"ID":"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7","Type":"ContainerDied","Data":"e7378ce99967a418bb666ca81ae384f81880abf4357d2fdbb7d4334fd0d74d0e"} Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.945605 4703 scope.go:117] "RemoveContainer" containerID="4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9" Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.947153 4703 generic.go:334] "Generic (PLEG): container finished" podID="0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" containerID="427afabcb50a00835711988d856a500bf45c54392847d99fbffe52bd4f22f135" exitCode=0 Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.947188 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" event={"ID":"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf","Type":"ContainerDied","Data":"427afabcb50a00835711988d856a500bf45c54392847d99fbffe52bd4f22f135"} Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.949495 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-config\") pod \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.951165 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-config" (OuterVolumeSpecName: "config") pod "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" (UID: "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7"). InnerVolumeSpecName "config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.966249 4703 scope.go:117] "RemoveContainer" containerID="4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9" Feb 02 12:56:20 crc kubenswrapper[4703]: E0202 12:56:20.967042 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9\": container with ID starting with 4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9 not found: ID does not exist" containerID="4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9" Feb 02 12:56:20 crc kubenswrapper[4703]: I0202 12:56:20.967108 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9"} err="failed to get container status \"4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9\": rpc error: code = NotFound desc = could not find container \"4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9\": container with ID starting with 4a2a8435d274c9b23002d04a3c6da67e1596cfee60929307aecd0acd846c70a9 not found: ID does not exist" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.017999 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.050624 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-serving-cert\") pod \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051491 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-client-ca\") pod \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051545 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-whtfh\" (UniqueName: \"kubernetes.io/projected/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-kube-api-access-whtfh\") pod \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\" (UID: \"5cf8e9aa-3a33-48e6-82f1-d0a341996ba7\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051592 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-client-ca\") pod \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051709 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-config\") pod \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051767 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-serving-cert\") pod \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\" (UID: 
\"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051806 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ffk65\" (UniqueName: \"kubernetes.io/projected/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-kube-api-access-ffk65\") pod \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051835 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-proxy-ca-bundles\") pod \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\" (UID: \"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf\") " Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.051977 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-client-ca" (OuterVolumeSpecName: "client-ca") pod "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" (UID: "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.052298 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.052317 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.052975 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-config" (OuterVolumeSpecName: "config") pod "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" (UID: "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.052987 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" (UID: "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.056728 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-client-ca" (OuterVolumeSpecName: "client-ca") pod "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" (UID: "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.057464 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" (UID: "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7"). InnerVolumeSpecName "serving-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.057616 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" (UID: "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.057992 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-kube-api-access-ffk65" (OuterVolumeSpecName: "kube-api-access-ffk65") pod "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" (UID: "0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf"). InnerVolumeSpecName "kube-api-access-ffk65". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.058410 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-kube-api-access-whtfh" (OuterVolumeSpecName: "kube-api-access-whtfh") pod "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" (UID: "5cf8e9aa-3a33-48e6-82f1-d0a341996ba7"). InnerVolumeSpecName "kube-api-access-whtfh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153618 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153666 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ffk65\" (UniqueName: \"kubernetes.io/projected/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-kube-api-access-ffk65\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153684 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-proxy-ca-bundles\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153697 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-serving-cert\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153709 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-whtfh\" (UniqueName: \"kubernetes.io/projected/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7-kube-api-access-whtfh\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153721 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-client-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.153733 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf-config\") on node \"crc\" DevicePath \"\"" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.289054 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"] Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.293727 4703 
kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-86vfp"] Feb 02 12:56:21 crc kubenswrapper[4703]: E0202 12:56:21.294079 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" containerName="controller-manager" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.294101 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" containerName="controller-manager" Feb 02 12:56:21 crc kubenswrapper[4703]: E0202 12:56:21.294112 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" containerName="route-controller-manager" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.294119 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" containerName="route-controller-manager" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.294385 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" containerName="route-controller-manager" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.294405 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" containerName="controller-manager" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.294903 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.298185 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"] Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.304552 4703 util.go:30] "No sandbox for pod can be found. 
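
The SyncLoop ADD/UPDATE/DELETE/REMOVE entries in this section all carry source="api": the kubelet is reacting to watch events from the apiserver, and the RemoveStaleState / "Deleted CPUSet assignment" lines are the cpu and memory managers dropping per-container state for pods that just went away. A minimal client-go sketch that follows the same pod event stream for one of the namespaces above (a hypothetical standalone program, not kubelet code; assumes a kubeconfig at the default path):

    package main

    import (
    	"context"
    	"fmt"

    	metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
    	"k8s.io/client-go/kubernetes"
    	"k8s.io/client-go/tools/clientcmd"
    )

    func main() {
    	// Build a client from ~/.kube/config (assumption: run outside the cluster).
    	cfg, err := clientcmd.BuildConfigFromFlags("", clientcmd.RecommendedHomeFile)
    	if err != nil {
    		panic(err)
    	}
    	cs, err := kubernetes.NewForConfig(cfg)
    	if err != nil {
    		panic(err)
    	}
    	// Watch the namespace whose pods appear in the log above.
    	w, err := cs.CoreV1().Pods("openshift-controller-manager").Watch(context.Background(), metav1.ListOptions{})
    	if err != nil {
    		panic(err)
    	}
    	for ev := range w.ResultChan() {
    		// ev.Type is ADDED/MODIFIED/DELETED, mirroring SyncLoop ADD/UPDATE/DELETE.
    		fmt.Println(ev.Type, ev.Object.(metav1.Object).GetName())
    	}
    }
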
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.307956 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.308185 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.308373 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.308621 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.308780 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.308863 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.311023 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-7fk4b"] Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.318360 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"] Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.323204 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-86vfp"] Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355425 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-client-ca\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355509 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-config\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355569 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-proxy-ca-bundles\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355650 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nz846\" (UniqueName: \"kubernetes.io/projected/e71d7a6c-4d58-429c-a112-7015340ad3f7-kube-api-access-nz846\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " 
pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355677 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-serving-cert\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355719 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8sqwm\" (UniqueName: \"kubernetes.io/projected/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-kube-api-access-8sqwm\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355741 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-config\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355778 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-client-ca\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.355811 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e71d7a6c-4d58-429c-a112-7015340ad3f7-serving-cert\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456195 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-config\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456265 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-proxy-ca-bundles\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456367 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nz846\" (UniqueName: \"kubernetes.io/projected/e71d7a6c-4d58-429c-a112-7015340ad3f7-kube-api-access-nz846\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " 
pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456407 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-serving-cert\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456430 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8sqwm\" (UniqueName: \"kubernetes.io/projected/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-kube-api-access-8sqwm\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456460 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-config\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456480 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-client-ca\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456515 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e71d7a6c-4d58-429c-a112-7015340ad3f7-serving-cert\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.456543 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-client-ca\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.457781 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-client-ca\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.457861 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-client-ca\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.457985 4703 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-config\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.458444 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-config\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.459750 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-proxy-ca-bundles\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.461428 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-serving-cert\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.466238 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e71d7a6c-4d58-429c-a112-7015340ad3f7-serving-cert\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.474982 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8sqwm\" (UniqueName: \"kubernetes.io/projected/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-kube-api-access-8sqwm\") pod \"route-controller-manager-55d8c4565f-psj45\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.476005 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nz846\" (UniqueName: \"kubernetes.io/projected/e71d7a6c-4d58-429c-a112-7015340ad3f7-kube-api-access-nz846\") pod \"controller-manager-c476db947-86vfp\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.611261 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.629846 4703 util.go:30] "No sandbox for pod can be found. 
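
Each kube-api-access-* volume mounted above is a projected volume that combines the pod's service-account token, kube-root-ca.crt, and namespace, surfaced inside the container at a fixed path. A minimal in-pod sketch that inspects those files (assumes it runs inside any pod with the default token mount):

    package main

    import (
    	"fmt"
    	"os"
    	"path/filepath"
    )

    // Well-known mount point of the projected service-account volume.
    const saDir = "/var/run/secrets/kubernetes.io/serviceaccount"

    func main() {
    	for _, name := range []string{"token", "ca.crt", "namespace"} {
    		b, err := os.ReadFile(filepath.Join(saDir, name))
    		if err != nil {
    			fmt.Printf("%s: %v\n", name, err)
    			continue
    		}
    		fmt.Printf("%s: %d bytes\n", name, len(b))
    	}
    }
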
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.939995 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5cf8e9aa-3a33-48e6-82f1-d0a341996ba7" path="/var/lib/kubelet/pods/5cf8e9aa-3a33-48e6-82f1-d0a341996ba7/volumes" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.962143 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" event={"ID":"0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf","Type":"ContainerDied","Data":"4f3f482f16599b5847bc5b9d6f1ef3b9e12ad6016e52866e43c6b3bab1348830"} Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.962567 4703 scope.go:117] "RemoveContainer" containerID="427afabcb50a00835711988d856a500bf45c54392847d99fbffe52bd4f22f135" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.962271 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-5rbfh" Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.981406 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-5rbfh"] Feb 02 12:56:21 crc kubenswrapper[4703]: I0202 12:56:21.985330 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-5rbfh"] Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.064703 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-86vfp"] Feb 02 12:56:22 crc kubenswrapper[4703]: W0202 12:56:22.072090 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode71d7a6c_4d58_429c_a112_7015340ad3f7.slice/crio-38804d2d024501b7d4cae6c91dad8a75a82488a967d791d396c79ef22619f780 WatchSource:0}: Error finding container 38804d2d024501b7d4cae6c91dad8a75a82488a967d791d396c79ef22619f780: Status 404 returned error can't find the container with id 38804d2d024501b7d4cae6c91dad8a75a82488a967d791d396c79ef22619f780 Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.117823 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"] Feb 02 12:56:22 crc kubenswrapper[4703]: W0202 12:56:22.128200 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pode48da46c_24fd_4653_b4ae_cb093cf5dbb7.slice/crio-46f969c7caec4b8bb7fad00a5368ecd22b3ae06b4b896a04ae6b4c906c35dfd7 WatchSource:0}: Error finding container 46f969c7caec4b8bb7fad00a5368ecd22b3ae06b4b896a04ae6b4c906c35dfd7: Status 404 returned error can't find the container with id 46f969c7caec4b8bb7fad00a5368ecd22b3ae06b4b896a04ae6b4c906c35dfd7 Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.972708 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" event={"ID":"e48da46c-24fd-4653-b4ae-cb093cf5dbb7","Type":"ContainerStarted","Data":"a6051541aa6e0e03f4a8c3a893325e58415f9c3c2d14c24824b456fea30b708c"} Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.972768 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" 
event={"ID":"e48da46c-24fd-4653-b4ae-cb093cf5dbb7","Type":"ContainerStarted","Data":"46f969c7caec4b8bb7fad00a5368ecd22b3ae06b4b896a04ae6b4c906c35dfd7"} Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.972969 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.976366 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" event={"ID":"e71d7a6c-4d58-429c-a112-7015340ad3f7","Type":"ContainerStarted","Data":"39ae9f933a062ec6a698957ea21979e2a6e066f226f3ef9f91f80625adaacdb3"} Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.976427 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" event={"ID":"e71d7a6c-4d58-429c-a112-7015340ad3f7","Type":"ContainerStarted","Data":"38804d2d024501b7d4cae6c91dad8a75a82488a967d791d396c79ef22619f780"} Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.976719 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.979597 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.982042 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:56:22 crc kubenswrapper[4703]: I0202 12:56:22.993790 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" podStartSLOduration=2.993769419 podStartE2EDuration="2.993769419s" podCreationTimestamp="2026-02-02 12:56:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:56:22.991981868 +0000 UTC m=+310.007189412" watchObservedRunningTime="2026-02-02 12:56:22.993769419 +0000 UTC m=+310.008976953" Feb 02 12:56:23 crc kubenswrapper[4703]: I0202 12:56:23.007247 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" podStartSLOduration=3.007229003 podStartE2EDuration="3.007229003s" podCreationTimestamp="2026-02-02 12:56:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:56:23.006810981 +0000 UTC m=+310.022018525" watchObservedRunningTime="2026-02-02 12:56:23.007229003 +0000 UTC m=+310.022436537" Feb 02 12:56:23 crc kubenswrapper[4703]: I0202 12:56:23.942917 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf" path="/var/lib/kubelet/pods/0b0a28f3-858e-4a72-b9ac-d7ed32ed1fcf/volumes" Feb 02 12:57:08 crc kubenswrapper[4703]: I0202 12:57:08.569594 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jwrr8"] Feb 02 12:57:08 crc kubenswrapper[4703]: I0202 12:57:08.571360 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-jwrr8" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" 
containerName="registry-server" containerID="cri-o://84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" gracePeriod=2 Feb 02 12:57:08 crc kubenswrapper[4703]: E0202 12:57:08.757490 4703 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01 is running failed: container process not found" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 12:57:08 crc kubenswrapper[4703]: E0202 12:57:08.758132 4703 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01 is running failed: container process not found" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 12:57:08 crc kubenswrapper[4703]: E0202 12:57:08.759796 4703 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01 is running failed: container process not found" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" cmd=["grpc_health_probe","-addr=:50051"] Feb 02 12:57:08 crc kubenswrapper[4703]: E0202 12:57:08.759874 4703 prober.go:104] "Probe errored" err="rpc error: code = NotFound desc = container is not created or running: checking if PID of 84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01 is running failed: container process not found" probeType="Readiness" pod="openshift-marketplace/redhat-operators-jwrr8" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="registry-server" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.045389 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.204233 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-catalog-content\") pod \"6a692829-23d1-4ed4-954d-33ded09c5a25\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.204864 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-tnprx\" (UniqueName: \"kubernetes.io/projected/6a692829-23d1-4ed4-954d-33ded09c5a25-kube-api-access-tnprx\") pod \"6a692829-23d1-4ed4-954d-33ded09c5a25\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.204978 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-utilities\") pod \"6a692829-23d1-4ed4-954d-33ded09c5a25\" (UID: \"6a692829-23d1-4ed4-954d-33ded09c5a25\") " Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.205884 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-utilities" (OuterVolumeSpecName: "utilities") pod "6a692829-23d1-4ed4-954d-33ded09c5a25" (UID: "6a692829-23d1-4ed4-954d-33ded09c5a25"). 
InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.211616 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a692829-23d1-4ed4-954d-33ded09c5a25-kube-api-access-tnprx" (OuterVolumeSpecName: "kube-api-access-tnprx") pod "6a692829-23d1-4ed4-954d-33ded09c5a25" (UID: "6a692829-23d1-4ed4-954d-33ded09c5a25"). InnerVolumeSpecName "kube-api-access-tnprx". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.250350 4703 generic.go:334] "Generic (PLEG): container finished" podID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" exitCode=0 Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.250404 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerDied","Data":"84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01"} Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.250413 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-jwrr8" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.250435 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-jwrr8" event={"ID":"6a692829-23d1-4ed4-954d-33ded09c5a25","Type":"ContainerDied","Data":"bde41cf39020cd08f1aeadf7d76940e006cf291a017f9dfa6ce0dcdb371cd852"} Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.250705 4703 scope.go:117] "RemoveContainer" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.270402 4703 scope.go:117] "RemoveContainer" containerID="cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.289725 4703 scope.go:117] "RemoveContainer" containerID="3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.306053 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-tnprx\" (UniqueName: \"kubernetes.io/projected/6a692829-23d1-4ed4-954d-33ded09c5a25-kube-api-access-tnprx\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.306092 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.312757 4703 scope.go:117] "RemoveContainer" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" Feb 02 12:57:09 crc kubenswrapper[4703]: E0202 12:57:09.313228 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01\": container with ID starting with 84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01 not found: ID does not exist" containerID="84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.313386 4703 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01"} err="failed to get container status \"84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01\": rpc error: code = NotFound desc = could not find container \"84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01\": container with ID starting with 84bb39aac1c55231db35f4e80f7b70e896a1a45290c76f44defc69e4e453ef01 not found: ID does not exist" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.313542 4703 scope.go:117] "RemoveContainer" containerID="cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0" Feb 02 12:57:09 crc kubenswrapper[4703]: E0202 12:57:09.313887 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0\": container with ID starting with cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0 not found: ID does not exist" containerID="cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.314017 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0"} err="failed to get container status \"cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0\": rpc error: code = NotFound desc = could not find container \"cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0\": container with ID starting with cc18ef0eb2f4b4f1de537de7456be740605365653dc6c8abd308cf6aeb422ab0 not found: ID does not exist" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.314117 4703 scope.go:117] "RemoveContainer" containerID="3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819" Feb 02 12:57:09 crc kubenswrapper[4703]: E0202 12:57:09.314600 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819\": container with ID starting with 3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819 not found: ID does not exist" containerID="3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.314662 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819"} err="failed to get container status \"3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819\": rpc error: code = NotFound desc = could not find container \"3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819\": container with ID starting with 3f4564a47237eb18d37f69483c5cd48c55fa8272a26f45593b352ba3b5eef819 not found: ID does not exist" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.331577 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6a692829-23d1-4ed4-954d-33ded09c5a25" (UID: "6a692829-23d1-4ed4-954d-33ded09c5a25"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.407113 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6a692829-23d1-4ed4-954d-33ded09c5a25-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.578203 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-jwrr8"] Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.583968 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-jwrr8"] Feb 02 12:57:09 crc kubenswrapper[4703]: I0202 12:57:09.940051 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" path="/var/lib/kubelet/pods/6a692829-23d1-4ed4-954d-33ded09c5a25/volumes" Feb 02 12:57:15 crc kubenswrapper[4703]: I0202 12:57:15.985221 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 12:57:15 crc kubenswrapper[4703]: I0202 12:57:15.985281 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.342200 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ffrjq"] Feb 02 12:57:18 crc kubenswrapper[4703]: E0202 12:57:18.342980 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="extract-content" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.342997 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="extract-content" Feb 02 12:57:18 crc kubenswrapper[4703]: E0202 12:57:18.343019 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="registry-server" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.343026 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="registry-server" Feb 02 12:57:18 crc kubenswrapper[4703]: E0202 12:57:18.343043 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="extract-utilities" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.343049 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="extract-utilities" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.343154 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a692829-23d1-4ed4-954d-33ded09c5a25" containerName="registry-server" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.343705 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.369312 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ffrjq"] Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426112 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426189 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426218 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-registry-tls\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426238 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-75zcl\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-kube-api-access-75zcl\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426305 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426334 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-registry-certificates\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426357 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-trusted-ca\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.426379 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bound-sa-token\" (UniqueName: 
\"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-bound-sa-token\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.449747 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526805 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-registry-certificates\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526874 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-trusted-ca\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526902 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-bound-sa-token\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526933 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526958 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526976 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-registry-tls\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.526993 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-75zcl\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-kube-api-access-75zcl\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " 
pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.528325 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-registry-certificates\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.528474 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-trusted-ca\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.528788 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-ca-trust-extracted\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.535022 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-registry-tls\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.538991 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-installation-pull-secrets\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.554240 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-bound-sa-token\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.554859 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-75zcl\" (UniqueName: \"kubernetes.io/projected/2c9b4cc3-0212-4a59-ae82-50ae0d5e2489-kube-api-access-75zcl\") pod \"image-registry-66df7c8f76-ffrjq\" (UID: \"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489\") " pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:18 crc kubenswrapper[4703]: I0202 12:57:18.670397 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:19 crc kubenswrapper[4703]: I0202 12:57:19.070002 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-image-registry/image-registry-66df7c8f76-ffrjq"] Feb 02 12:57:19 crc kubenswrapper[4703]: I0202 12:57:19.303382 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" event={"ID":"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489","Type":"ContainerStarted","Data":"6509db6f491c4f318c1d7246efc6a76c7fd39d3955920c77aa1228a3dc29658b"} Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.031861 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-86vfp"] Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.032083 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" podUID="e71d7a6c-4d58-429c-a112-7015340ad3f7" containerName="controller-manager" containerID="cri-o://39ae9f933a062ec6a698957ea21979e2a6e066f226f3ef9f91f80625adaacdb3" gracePeriod=30 Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.055822 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"] Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.056128 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" podUID="e48da46c-24fd-4653-b4ae-cb093cf5dbb7" containerName="route-controller-manager" containerID="cri-o://a6051541aa6e0e03f4a8c3a893325e58415f9c3c2d14c24824b456fea30b708c" gracePeriod=30 Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.309062 4703 generic.go:334] "Generic (PLEG): container finished" podID="e48da46c-24fd-4653-b4ae-cb093cf5dbb7" containerID="a6051541aa6e0e03f4a8c3a893325e58415f9c3c2d14c24824b456fea30b708c" exitCode=0 Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.309149 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" event={"ID":"e48da46c-24fd-4653-b4ae-cb093cf5dbb7","Type":"ContainerDied","Data":"a6051541aa6e0e03f4a8c3a893325e58415f9c3c2d14c24824b456fea30b708c"} Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.310971 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" event={"ID":"2c9b4cc3-0212-4a59-ae82-50ae0d5e2489","Type":"ContainerStarted","Data":"80c69069a99240f90a4462f1b8142accfb71bc9ea23f307eac3ffd33b5f2776f"} Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.311112 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.312332 4703 generic.go:334] "Generic (PLEG): container finished" podID="e71d7a6c-4d58-429c-a112-7015340ad3f7" containerID="39ae9f933a062ec6a698957ea21979e2a6e066f226f3ef9f91f80625adaacdb3" exitCode=0 Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.312366 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" event={"ID":"e71d7a6c-4d58-429c-a112-7015340ad3f7","Type":"ContainerDied","Data":"39ae9f933a062ec6a698957ea21979e2a6e066f226f3ef9f91f80625adaacdb3"} Feb 02 12:57:20 crc 
kubenswrapper[4703]: I0202 12:57:20.330594 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" podStartSLOduration=2.330566261 podStartE2EDuration="2.330566261s" podCreationTimestamp="2026-02-02 12:57:18 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:57:20.326918462 +0000 UTC m=+367.342125996" watchObservedRunningTime="2026-02-02 12:57:20.330566261 +0000 UTC m=+367.345773795" Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.504094 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.509512 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664739 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-serving-cert\") pod \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664811 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e71d7a6c-4d58-429c-a112-7015340ad3f7-serving-cert\") pod \"e71d7a6c-4d58-429c-a112-7015340ad3f7\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664851 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8sqwm\" (UniqueName: \"kubernetes.io/projected/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-kube-api-access-8sqwm\") pod \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664908 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-client-ca\") pod \"e71d7a6c-4d58-429c-a112-7015340ad3f7\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664937 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nz846\" (UniqueName: \"kubernetes.io/projected/e71d7a6c-4d58-429c-a112-7015340ad3f7-kube-api-access-nz846\") pod \"e71d7a6c-4d58-429c-a112-7015340ad3f7\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664969 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-config\") pod \"e71d7a6c-4d58-429c-a112-7015340ad3f7\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.664992 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-proxy-ca-bundles\") pod \"e71d7a6c-4d58-429c-a112-7015340ad3f7\" (UID: \"e71d7a6c-4d58-429c-a112-7015340ad3f7\") " Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665463 4703 
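
Every payload line in this dump follows klog's header format: severity letter, MMDD date, wall-clock time, PID, source file:line, then the structured message. A small parser for slicing a log like this by component or pod (a sketch; the regexp simply matches the header as it appears above):

    package main

    import (
    	"fmt"
    	"regexp"
    )

    // Matches the klog header used by every kubenswrapper entry here.
    var header = regexp.MustCompile(`([IWE])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.+)`)

    func main() {
    	line := `I0202 12:57:21.320765 4703 scope.go:117] "RemoveContainer" containerID="a6051541aa6e"`
    	if m := header.FindStringSubmatch(line); m != nil {
    		fmt.Printf("sev=%s date=%s time=%s pid=%s src=%s msg=%s\n",
    			m[1], m[2], m[3], m[4], m[5], m[6])
    	}
    }
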
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665545 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-config\") pod \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\" (UID: \"e48da46c-24fd-4653-b4ae-cb093cf5dbb7\") "
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665774 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-client-ca" (OuterVolumeSpecName: "client-ca") pod "e71d7a6c-4d58-429c-a112-7015340ad3f7" (UID: "e71d7a6c-4d58-429c-a112-7015340ad3f7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665827 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-config" (OuterVolumeSpecName: "config") pod "e71d7a6c-4d58-429c-a112-7015340ad3f7" (UID: "e71d7a6c-4d58-429c-a112-7015340ad3f7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665784 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-proxy-ca-bundles" (OuterVolumeSpecName: "proxy-ca-bundles") pod "e71d7a6c-4d58-429c-a112-7015340ad3f7" (UID: "e71d7a6c-4d58-429c-a112-7015340ad3f7"). InnerVolumeSpecName "proxy-ca-bundles". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665946 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-client-ca\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665965 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-config\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.665976 4703 reconciler_common.go:293] "Volume detached for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/e71d7a6c-4d58-429c-a112-7015340ad3f7-proxy-ca-bundles\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.666299 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-client-ca" (OuterVolumeSpecName: "client-ca") pod "e48da46c-24fd-4653-b4ae-cb093cf5dbb7" (UID: "e48da46c-24fd-4653-b4ae-cb093cf5dbb7"). InnerVolumeSpecName "client-ca". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.666390 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-config" (OuterVolumeSpecName: "config") pod "e48da46c-24fd-4653-b4ae-cb093cf5dbb7" (UID: "e48da46c-24fd-4653-b4ae-cb093cf5dbb7"). InnerVolumeSpecName "config". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.671486 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e71d7a6c-4d58-429c-a112-7015340ad3f7-kube-api-access-nz846" (OuterVolumeSpecName: "kube-api-access-nz846") pod "e71d7a6c-4d58-429c-a112-7015340ad3f7" (UID: "e71d7a6c-4d58-429c-a112-7015340ad3f7"). InnerVolumeSpecName "kube-api-access-nz846". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.672136 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e48da46c-24fd-4653-b4ae-cb093cf5dbb7" (UID: "e48da46c-24fd-4653-b4ae-cb093cf5dbb7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.672232 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-kube-api-access-8sqwm" (OuterVolumeSpecName: "kube-api-access-8sqwm") pod "e48da46c-24fd-4653-b4ae-cb093cf5dbb7" (UID: "e48da46c-24fd-4653-b4ae-cb093cf5dbb7"). InnerVolumeSpecName "kube-api-access-8sqwm". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.674522 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e71d7a6c-4d58-429c-a112-7015340ad3f7-serving-cert" (OuterVolumeSpecName: "serving-cert") pod "e71d7a6c-4d58-429c-a112-7015340ad3f7" (UID: "e71d7a6c-4d58-429c-a112-7015340ad3f7"). InnerVolumeSpecName "serving-cert". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.767547 4703 reconciler_common.go:293] "Volume detached for volume \"config\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-config\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.767603 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.767615 4703 reconciler_common.go:293] "Volume detached for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/e71d7a6c-4d58-429c-a112-7015340ad3f7-serving-cert\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.767625 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8sqwm\" (UniqueName: \"kubernetes.io/projected/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-kube-api-access-8sqwm\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.767636 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nz846\" (UniqueName: \"kubernetes.io/projected/e71d7a6c-4d58-429c-a112-7015340ad3f7-kube-api-access-nz846\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:20 crc kubenswrapper[4703]: I0202 12:57:20.767644 4703 reconciler_common.go:293] "Volume detached for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/e48da46c-24fd-4653-b4ae-cb093cf5dbb7-client-ca\") on node \"crc\" DevicePath \"\""
Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.320632 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"
Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.320671 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45" event={"ID":"e48da46c-24fd-4653-b4ae-cb093cf5dbb7","Type":"ContainerDied","Data":"46f969c7caec4b8bb7fad00a5368ecd22b3ae06b4b896a04ae6b4c906c35dfd7"}
Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.320765 4703 scope.go:117] "RemoveContainer" containerID="a6051541aa6e0e03f4a8c3a893325e58415f9c3c2d14c24824b456fea30b708c"
Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.322006 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.322056 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-c476db947-86vfp" event={"ID":"e71d7a6c-4d58-429c-a112-7015340ad3f7","Type":"ContainerDied","Data":"38804d2d024501b7d4cae6c91dad8a75a82488a967d791d396c79ef22619f780"} Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.342227 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-4dd27"] Feb 02 12:57:21 crc kubenswrapper[4703]: E0202 12:57:21.342766 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e48da46c-24fd-4653-b4ae-cb093cf5dbb7" containerName="route-controller-manager" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.342786 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e48da46c-24fd-4653-b4ae-cb093cf5dbb7" containerName="route-controller-manager" Feb 02 12:57:21 crc kubenswrapper[4703]: E0202 12:57:21.342802 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e71d7a6c-4d58-429c-a112-7015340ad3f7" containerName="controller-manager" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.342811 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e71d7a6c-4d58-429c-a112-7015340ad3f7" containerName="controller-manager" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.342955 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="e48da46c-24fd-4653-b4ae-cb093cf5dbb7" containerName="route-controller-manager" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.342976 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="e71d7a6c-4d58-429c-a112-7015340ad3f7" containerName="controller-manager" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.343580 4703 scope.go:117] "RemoveContainer" containerID="39ae9f933a062ec6a698957ea21979e2a6e066f226f3ef9f91f80625adaacdb3" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.343693 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.347627 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-service-ca.crt" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.349228 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"kube-root-ca.crt" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.349435 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"openshift-controller-manager-sa-dockercfg-msq4c" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.349689 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-controller-manager"/"serving-cert" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.349921 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7"] Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.350228 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"client-ca" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.350466 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"config" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.351036 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.353507 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-controller-manager"/"openshift-global-ca" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.354925 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"config" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.355291 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"serving-cert" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.355492 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"openshift-service-ca.crt" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.355715 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"client-ca" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.355926 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-route-controller-manager"/"kube-root-ca.crt" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.355983 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-route-controller-manager"/"route-controller-manager-sa-dockercfg-h2zr2" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.356264 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7"] Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.376911 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-4dd27"] Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.406062 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"] Feb 02 
12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.411045 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-55d8c4565f-psj45"] Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.419327 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-86vfp"] Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.426203 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-controller-manager/controller-manager-c476db947-86vfp"] Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478083 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed0a6374-9350-4cec-86f6-782e84353527-config\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478154 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-client-ca\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478186 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed0a6374-9350-4cec-86f6-782e84353527-serving-cert\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478216 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-config\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478402 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vbf6p\" (UniqueName: \"kubernetes.io/projected/ed0a6374-9350-4cec-86f6-782e84353527-kube-api-access-vbf6p\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478491 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5695f\" (UniqueName: \"kubernetes.io/projected/29402650-75b3-40b3-9d13-ee9e89435dbb-kube-api-access-5695f\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478578 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed0a6374-9350-4cec-86f6-782e84353527-client-ca\") pod 
\"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478639 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29402650-75b3-40b3-9d13-ee9e89435dbb-serving-cert\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.478683 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-proxy-ca-bundles\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.580340 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5695f\" (UniqueName: \"kubernetes.io/projected/29402650-75b3-40b3-9d13-ee9e89435dbb-kube-api-access-5695f\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.580410 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed0a6374-9350-4cec-86f6-782e84353527-client-ca\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.580456 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29402650-75b3-40b3-9d13-ee9e89435dbb-serving-cert\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581068 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-proxy-ca-bundles\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581113 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed0a6374-9350-4cec-86f6-782e84353527-config\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581130 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-client-ca\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " 
pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581146 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed0a6374-9350-4cec-86f6-782e84353527-serving-cert\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581166 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-config\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581192 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vbf6p\" (UniqueName: \"kubernetes.io/projected/ed0a6374-9350-4cec-86f6-782e84353527-kube-api-access-vbf6p\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.581435 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/ed0a6374-9350-4cec-86f6-782e84353527-client-ca\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.582353 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"client-ca\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-client-ca\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.583049 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/ed0a6374-9350-4cec-86f6-782e84353527-config\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.583627 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-config\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.585423 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/29402650-75b3-40b3-9d13-ee9e89435dbb-serving-cert\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.585852 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"proxy-ca-bundles\" (UniqueName: \"kubernetes.io/configmap/29402650-75b3-40b3-9d13-ee9e89435dbb-proxy-ca-bundles\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.590086 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"serving-cert\" (UniqueName: \"kubernetes.io/secret/ed0a6374-9350-4cec-86f6-782e84353527-serving-cert\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.598483 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5695f\" (UniqueName: \"kubernetes.io/projected/29402650-75b3-40b3-9d13-ee9e89435dbb-kube-api-access-5695f\") pod \"controller-manager-656665f9d-4dd27\" (UID: \"29402650-75b3-40b3-9d13-ee9e89435dbb\") " pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.601722 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vbf6p\" (UniqueName: \"kubernetes.io/projected/ed0a6374-9350-4cec-86f6-782e84353527-kube-api-access-vbf6p\") pod \"route-controller-manager-74685fcd8b-qcvz7\" (UID: \"ed0a6374-9350-4cec-86f6-782e84353527\") " pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.687547 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.698435 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.941007 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e48da46c-24fd-4653-b4ae-cb093cf5dbb7" path="/var/lib/kubelet/pods/e48da46c-24fd-4653-b4ae-cb093cf5dbb7/volumes" Feb 02 12:57:21 crc kubenswrapper[4703]: I0202 12:57:21.941605 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e71d7a6c-4d58-429c-a112-7015340ad3f7" path="/var/lib/kubelet/pods/e71d7a6c-4d58-429c-a112-7015340ad3f7/volumes" Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.143253 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7"] Feb 02 12:57:22 crc kubenswrapper[4703]: W0202 12:57:22.143894 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poded0a6374_9350_4cec_86f6_782e84353527.slice/crio-ccd3627b8c64994a3e7d9c31168f8022f45b2ba8049d1b4a5513356929b81f62 WatchSource:0}: Error finding container ccd3627b8c64994a3e7d9c31168f8022f45b2ba8049d1b4a5513356929b81f62: Status 404 returned error can't find the container with id ccd3627b8c64994a3e7d9c31168f8022f45b2ba8049d1b4a5513356929b81f62 Feb 02 12:57:22 crc kubenswrapper[4703]: W0202 12:57:22.148435 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod29402650_75b3_40b3_9d13_ee9e89435dbb.slice/crio-ca1ee844752cfeba9c86a6f904566eb56a68d6df2b18e4defb2395d487d1277e WatchSource:0}: Error finding container ca1ee844752cfeba9c86a6f904566eb56a68d6df2b18e4defb2395d487d1277e: Status 404 returned error can't find the container with id ca1ee844752cfeba9c86a6f904566eb56a68d6df2b18e4defb2395d487d1277e Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.151913 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-controller-manager/controller-manager-656665f9d-4dd27"] Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.329731 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" event={"ID":"29402650-75b3-40b3-9d13-ee9e89435dbb","Type":"ContainerStarted","Data":"ba8f0f046dec7e16cf6634a7f8082993d1384a59996b658022933e8898d7fc6a"} Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.330026 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.330045 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" event={"ID":"29402650-75b3-40b3-9d13-ee9e89435dbb","Type":"ContainerStarted","Data":"ca1ee844752cfeba9c86a6f904566eb56a68d6df2b18e4defb2395d487d1277e"} Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.331614 4703 patch_prober.go:28] interesting pod/controller-manager-656665f9d-4dd27 container/controller-manager namespace/openshift-controller-manager: Readiness probe status=failure output="Get \"https://10.217.0.69:8443/healthz\": dial tcp 10.217.0.69:8443: connect: connection refused" start-of-body= Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.331683 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" podUID="29402650-75b3-40b3-9d13-ee9e89435dbb" 
containerName="controller-manager" probeResult="failure" output="Get \"https://10.217.0.69:8443/healthz\": dial tcp 10.217.0.69:8443: connect: connection refused" Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.335545 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" event={"ID":"ed0a6374-9350-4cec-86f6-782e84353527","Type":"ContainerStarted","Data":"b3578bc506ff8b189aa9f7daff74e69a4d6ce3b835e9aa9830498c1a9aa27668"} Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.335604 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" event={"ID":"ed0a6374-9350-4cec-86f6-782e84353527","Type":"ContainerStarted","Data":"ccd3627b8c64994a3e7d9c31168f8022f45b2ba8049d1b4a5513356929b81f62"} Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.335847 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.352640 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" podStartSLOduration=2.352617364 podStartE2EDuration="2.352617364s" podCreationTimestamp="2026-02-02 12:57:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:57:22.351650767 +0000 UTC m=+369.366858301" watchObservedRunningTime="2026-02-02 12:57:22.352617364 +0000 UTC m=+369.367824918" Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.372973 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" podStartSLOduration=2.3729390759999998 podStartE2EDuration="2.372939076s" podCreationTimestamp="2026-02-02 12:57:20 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:57:22.37050394 +0000 UTC m=+369.385711484" watchObservedRunningTime="2026-02-02 12:57:22.372939076 +0000 UTC m=+369.388146620" Feb 02 12:57:22 crc kubenswrapper[4703]: I0202 12:57:22.906838 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-route-controller-manager/route-controller-manager-74685fcd8b-qcvz7" Feb 02 12:57:23 crc kubenswrapper[4703]: I0202 12:57:23.347086 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-controller-manager/controller-manager-656665f9d-4dd27" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.701508 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kbjrd"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.709131 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zf6z4"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.709452 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-zf6z4" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="registry-server" containerID="cri-o://f3a3f1c75cf560db7ce3523779ccdbd6b9e69c9d344ec577a3be4f9ec262686e" gracePeriod=30 Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.709765 4703 kuberuntime_container.go:808] "Killing container 
with a grace period" pod="openshift-marketplace/certified-operators-kbjrd" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="registry-server" containerID="cri-o://4352b631e36a635d35fb1ea278f35e10e9e1f9e66e312a0e6d3ec61e173353ae" gracePeriod=30 Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.717813 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rxr5v"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.718123 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" containerID="cri-o://2dc593a4acac2ad05dc60007f763c2c68600f7d2c341d50da41aecfaa0422a59" gracePeriod=30 Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.741715 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7c4qb"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.742080 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-7c4qb" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="registry-server" containerID="cri-o://7146b5a7e28d5f2788438a1896aff9e2b7a08a0c9cf929c4c06954b9ee11193b" gracePeriod=30 Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.754144 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rzrkb"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.754452 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-rzrkb" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="registry-server" containerID="cri-o://af2a2368d9fb50020651acde1c3ac3f21de8bf90c4dd9b1d43e8769e48d10985" gracePeriod=30 Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.758213 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tbv5t"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.759102 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.771139 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tbv5t"] Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.838228 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.838326 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.838477 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-s8j5z\" (UniqueName: \"kubernetes.io/projected/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-kube-api-access-s8j5z\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.939513 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.939601 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-s8j5z\" (UniqueName: \"kubernetes.io/projected/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-kube-api-access-s8j5z\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.939631 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.940754 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-marketplace-trusted-ca\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.946115 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"marketplace-operator-metrics\" (UniqueName: 
\"kubernetes.io/secret/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-marketplace-operator-metrics\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:31 crc kubenswrapper[4703]: I0202 12:57:31.957900 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-s8j5z\" (UniqueName: \"kubernetes.io/projected/71ff2212-c3fe-45e2-87ef-a4d4e19ce91d-kube-api-access-s8j5z\") pod \"marketplace-operator-79b997595-tbv5t\" (UID: \"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d\") " pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.084534 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.396180 4703 generic.go:334] "Generic (PLEG): container finished" podID="0331857a-b571-4128-9927-b2cc5dd58969" containerID="af2a2368d9fb50020651acde1c3ac3f21de8bf90c4dd9b1d43e8769e48d10985" exitCode=0 Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.396231 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerDied","Data":"af2a2368d9fb50020651acde1c3ac3f21de8bf90c4dd9b1d43e8769e48d10985"} Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.407581 4703 generic.go:334] "Generic (PLEG): container finished" podID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerID="f3a3f1c75cf560db7ce3523779ccdbd6b9e69c9d344ec577a3be4f9ec262686e" exitCode=0 Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.407678 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerDied","Data":"f3a3f1c75cf560db7ce3523779ccdbd6b9e69c9d344ec577a3be4f9ec262686e"} Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.415574 4703 generic.go:334] "Generic (PLEG): container finished" podID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerID="7146b5a7e28d5f2788438a1896aff9e2b7a08a0c9cf929c4c06954b9ee11193b" exitCode=0 Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.415645 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7c4qb" event={"ID":"7b2bcd29-093a-439f-b3ed-e2aef1ae6904","Type":"ContainerDied","Data":"7146b5a7e28d5f2788438a1896aff9e2b7a08a0c9cf929c4c06954b9ee11193b"} Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.418864 4703 generic.go:334] "Generic (PLEG): container finished" podID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerID="4352b631e36a635d35fb1ea278f35e10e9e1f9e66e312a0e6d3ec61e173353ae" exitCode=0 Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.418929 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerDied","Data":"4352b631e36a635d35fb1ea278f35e10e9e1f9e66e312a0e6d3ec61e173353ae"} Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.421318 4703 generic.go:334] "Generic (PLEG): container finished" podID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerID="2dc593a4acac2ad05dc60007f763c2c68600f7d2c341d50da41aecfaa0422a59" exitCode=0 Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.421373 4703 kubelet.go:2453] "SyncLoop (PLEG): event for 
pod" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" event={"ID":"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d","Type":"ContainerDied","Data":"2dc593a4acac2ad05dc60007f763c2c68600f7d2c341d50da41aecfaa0422a59"} Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.421437 4703 scope.go:117] "RemoveContainer" containerID="183546ba7d2be3961f39aec48156b93ad70e1f3aa707d1dce8f581fd308401c5" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.550672 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-tbv5t"] Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.735749 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.852486 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-catalog-content\") pod \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.852571 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-utilities\") pod \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.852726 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-b58px\" (UniqueName: \"kubernetes.io/projected/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-kube-api-access-b58px\") pod \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\" (UID: \"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d\") " Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.862050 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-utilities" (OuterVolumeSpecName: "utilities") pod "ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" (UID: "ae6cfab9-b3ad-4791-bb0f-08b264c22a3d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.869059 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-kube-api-access-b58px" (OuterVolumeSpecName: "kube-api-access-b58px") pod "ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" (UID: "ae6cfab9-b3ad-4791-bb0f-08b264c22a3d"). InnerVolumeSpecName "kube-api-access-b58px". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.907321 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" (UID: "ae6cfab9-b3ad-4791-bb0f-08b264c22a3d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.954588 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-b58px\" (UniqueName: \"kubernetes.io/projected/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-kube-api-access-b58px\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.954633 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:32 crc kubenswrapper[4703]: I0202 12:57:32.954643 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.196444 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.201718 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.208305 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.213757 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257570 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-catalog-content\") pod \"0331857a-b571-4128-9927-b2cc5dd58969\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257687 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kb4p8\" (UniqueName: \"kubernetes.io/projected/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-kube-api-access-kb4p8\") pod \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257730 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-catalog-content\") pod \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257756 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-trusted-ca\") pod \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257810 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-utilities\") pod \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257842 4703 
reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-operator-metrics\") pod \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257872 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zwnl7\" (UniqueName: \"kubernetes.io/projected/0331857a-b571-4128-9927-b2cc5dd58969-kube-api-access-zwnl7\") pod \"0331857a-b571-4128-9927-b2cc5dd58969\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257905 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8t5z5\" (UniqueName: \"kubernetes.io/projected/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-kube-api-access-8t5z5\") pod \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\" (UID: \"7b2bcd29-093a-439f-b3ed-e2aef1ae6904\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257935 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-utilities\") pod \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.257996 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-utilities\") pod \"0331857a-b571-4128-9927-b2cc5dd58969\" (UID: \"0331857a-b571-4128-9927-b2cc5dd58969\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.258029 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-j6jjs\" (UniqueName: \"kubernetes.io/projected/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-kube-api-access-j6jjs\") pod \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\" (UID: \"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.258053 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-catalog-content\") pod \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\" (UID: \"28bd7c5d-2710-4c7c-af7b-f594c8d3352c\") " Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.258616 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-utilities" (OuterVolumeSpecName: "utilities") pod "7b2bcd29-093a-439f-b3ed-e2aef1ae6904" (UID: "7b2bcd29-093a-439f-b3ed-e2aef1ae6904"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.259598 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-utilities" (OuterVolumeSpecName: "utilities") pod "0331857a-b571-4128-9927-b2cc5dd58969" (UID: "0331857a-b571-4128-9927-b2cc5dd58969"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.260190 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-utilities" (OuterVolumeSpecName: "utilities") pod "28bd7c5d-2710-4c7c-af7b-f594c8d3352c" (UID: "28bd7c5d-2710-4c7c-af7b-f594c8d3352c"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.260308 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-trusted-ca" (OuterVolumeSpecName: "marketplace-trusted-ca") pod "fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" (UID: "fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d"). InnerVolumeSpecName "marketplace-trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.270295 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-kube-api-access-kb4p8" (OuterVolumeSpecName: "kube-api-access-kb4p8") pod "28bd7c5d-2710-4c7c-af7b-f594c8d3352c" (UID: "28bd7c5d-2710-4c7c-af7b-f594c8d3352c"). InnerVolumeSpecName "kube-api-access-kb4p8". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.270335 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/0331857a-b571-4128-9927-b2cc5dd58969-kube-api-access-zwnl7" (OuterVolumeSpecName: "kube-api-access-zwnl7") pod "0331857a-b571-4128-9927-b2cc5dd58969" (UID: "0331857a-b571-4128-9927-b2cc5dd58969"). InnerVolumeSpecName "kube-api-access-zwnl7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.272821 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-operator-metrics" (OuterVolumeSpecName: "marketplace-operator-metrics") pod "fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" (UID: "fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d"). InnerVolumeSpecName "marketplace-operator-metrics". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.273368 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-kube-api-access-j6jjs" (OuterVolumeSpecName: "kube-api-access-j6jjs") pod "fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" (UID: "fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d"). InnerVolumeSpecName "kube-api-access-j6jjs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.274121 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-kube-api-access-8t5z5" (OuterVolumeSpecName: "kube-api-access-8t5z5") pod "7b2bcd29-093a-439f-b3ed-e2aef1ae6904" (UID: "7b2bcd29-093a-439f-b3ed-e2aef1ae6904"). InnerVolumeSpecName "kube-api-access-8t5z5". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.315419 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "7b2bcd29-093a-439f-b3ed-e2aef1ae6904" (UID: "7b2bcd29-093a-439f-b3ed-e2aef1ae6904"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368800 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kb4p8\" (UniqueName: \"kubernetes.io/projected/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-kube-api-access-kb4p8\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368865 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368882 4703 reconciler_common.go:293] "Volume detached for volume \"marketplace-trusted-ca\" (UniqueName: \"kubernetes.io/configmap/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368895 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368918 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zwnl7\" (UniqueName: \"kubernetes.io/projected/0331857a-b571-4128-9927-b2cc5dd58969-kube-api-access-zwnl7\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368932 4703 reconciler_common.go:293] "Volume detached for volume \"marketplace-operator-metrics\" (UniqueName: \"kubernetes.io/secret/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-marketplace-operator-metrics\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368951 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8t5z5\" (UniqueName: \"kubernetes.io/projected/7b2bcd29-093a-439f-b3ed-e2aef1ae6904-kube-api-access-8t5z5\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368966 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368984 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.368997 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-j6jjs\" (UniqueName: \"kubernetes.io/projected/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d-kube-api-access-j6jjs\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.407862 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "28bd7c5d-2710-4c7c-af7b-f594c8d3352c" 
(UID: "28bd7c5d-2710-4c7c-af7b-f594c8d3352c"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.440902 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-zf6z4" event={"ID":"28bd7c5d-2710-4c7c-af7b-f594c8d3352c","Type":"ContainerDied","Data":"bd9a6cc9cc35dbda142633aa3f407634746cfd2d3ad895007b8457a86527b35a"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.440961 4703 scope.go:117] "RemoveContainer" containerID="f3a3f1c75cf560db7ce3523779ccdbd6b9e69c9d344ec577a3be4f9ec262686e" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.441106 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-zf6z4" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.449896 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-7c4qb" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.450949 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-7c4qb" event={"ID":"7b2bcd29-093a-439f-b3ed-e2aef1ae6904","Type":"ContainerDied","Data":"be79909a446c6cbd804d164fc3928c82e01dab48fbb404bd2fa7bd31fceee2e3"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.454546 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-kbjrd" event={"ID":"ae6cfab9-b3ad-4791-bb0f-08b264c22a3d","Type":"ContainerDied","Data":"1462caadd7b21ee3b077c72dc6d2e86ec0c5921a7a9b81c387952d9ac4abbc91"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.454641 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-kbjrd" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.456257 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" event={"ID":"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d","Type":"ContainerStarted","Data":"1018923d3029f794eec07165becffdbf4c7fefc8d58fa3d5adfb39366e91feea"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.456344 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" event={"ID":"71ff2212-c3fe-45e2-87ef-a4d4e19ce91d","Type":"ContainerStarted","Data":"a28bfac2daf0fa8bc3b9fd08fe3bbde7663bb20194fe110efe114dcd1e8602e6"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.457532 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.464751 4703 scope.go:117] "RemoveContainer" containerID="d1f79b97d4b4e76caf43ac07d34154e990756e663deb1760e360fd37626c60ae" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.465305 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.465365 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/marketplace-operator-79b997595-rxr5v" event={"ID":"fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d","Type":"ContainerDied","Data":"c8f4804fd124d3d6cc3b7e593a946a7367770d264189d44ed191912849ef4d5e"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.470737 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.472548 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-rzrkb" event={"ID":"0331857a-b571-4128-9927-b2cc5dd58969","Type":"ContainerDied","Data":"857983b80f77874fe31978f30b88c7736aaf8e0dfbf482093daa28964b60197f"} Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.472736 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-rzrkb" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.473697 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/28bd7c5d-2710-4c7c-af7b-f594c8d3352c-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.487299 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/marketplace-operator-79b997595-tbv5t" podStartSLOduration=2.487263342 podStartE2EDuration="2.487263342s" podCreationTimestamp="2026-02-02 12:57:31 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 12:57:33.480155109 +0000 UTC m=+380.495362643" watchObservedRunningTime="2026-02-02 12:57:33.487263342 +0000 UTC m=+380.502470876" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.505068 4703 scope.go:117] "RemoveContainer" containerID="41350c444ae8c56273d87552085cf1ee20135ae2c2fd03466d7543796c2f8ab5" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.509957 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "0331857a-b571-4128-9927-b2cc5dd58969" (UID: "0331857a-b571-4128-9927-b2cc5dd58969"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.540160 4703 scope.go:117] "RemoveContainer" containerID="7146b5a7e28d5f2788438a1896aff9e2b7a08a0c9cf929c4c06954b9ee11193b" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.558412 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-zf6z4"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.565404 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-zf6z4"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.570542 4703 scope.go:117] "RemoveContainer" containerID="1b29849f4cf9b8ee6eeb897f955c2cafb2bd05e3d5660507396d4db5cd3c9b2f" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.575758 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/0331857a-b571-4128-9927-b2cc5dd58969-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.579448 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-7c4qb"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.592396 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-7c4qb"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.596397 4703 scope.go:117] "RemoveContainer" containerID="22062f341ab3d02fea37e8eba60cc9190d9f3513b44ce3c74e499cfe29bbf758" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.608422 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-kbjrd"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.609959 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-kbjrd"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.616958 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rxr5v"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.619863 4703 scope.go:117] "RemoveContainer" containerID="4352b631e36a635d35fb1ea278f35e10e9e1f9e66e312a0e6d3ec61e173353ae" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.621738 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/marketplace-operator-79b997595-rxr5v"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.651376 4703 scope.go:117] "RemoveContainer" containerID="557a7b680a3b7c85ce50e22133d18f1ec2ed7ab52bda65a4272bcf8248d49257" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.679113 4703 scope.go:117] "RemoveContainer" containerID="3c6bad9340025e13170315751ebb42512380a1f6ca2b47ab1f45316dae5ad4ee" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.708411 4703 scope.go:117] "RemoveContainer" containerID="2dc593a4acac2ad05dc60007f763c2c68600f7d2c341d50da41aecfaa0422a59" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.727855 4703 scope.go:117] "RemoveContainer" containerID="af2a2368d9fb50020651acde1c3ac3f21de8bf90c4dd9b1d43e8769e48d10985" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.745706 4703 scope.go:117] "RemoveContainer" containerID="aab0997c56c0062bc25c3c3e7d0e3494866fefa75dfb386781781c65bb984fc2" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.762732 4703 scope.go:117] "RemoveContainer" containerID="a77513de6f0577a48d44d7d0636cf7b9bc936073798792d2e2886ccceaab1ee3" Feb 02 12:57:33 crc 
kubenswrapper[4703]: I0202 12:57:33.802423 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-rzrkb"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.809607 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-rzrkb"] Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.944363 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="0331857a-b571-4128-9927-b2cc5dd58969" path="/var/lib/kubelet/pods/0331857a-b571-4128-9927-b2cc5dd58969/volumes" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.945181 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" path="/var/lib/kubelet/pods/28bd7c5d-2710-4c7c-af7b-f594c8d3352c/volumes" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.945972 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" path="/var/lib/kubelet/pods/7b2bcd29-093a-439f-b3ed-e2aef1ae6904/volumes" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.947377 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" path="/var/lib/kubelet/pods/ae6cfab9-b3ad-4791-bb0f-08b264c22a3d/volumes" Feb 02 12:57:33 crc kubenswrapper[4703]: I0202 12:57:33.948173 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" path="/var/lib/kubelet/pods/fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d/volumes" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055342 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-mfzvq"] Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055711 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055732 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055745 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055789 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055799 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055808 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055821 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055831 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055856 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="registry-server" Feb 02 12:57:34 crc 
kubenswrapper[4703]: I0202 12:57:34.055865 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055880 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055888 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055899 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055906 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055915 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055923 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055934 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055944 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="extract-content" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055951 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055957 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055970 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055977 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.055985 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.055991 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.056005 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056013 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="extract-utilities" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056143 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="28bd7c5d-2710-4c7c-af7b-f594c8d3352c" 
containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056163 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056173 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056187 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="ae6cfab9-b3ad-4791-bb0f-08b264c22a3d" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056200 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="0331857a-b571-4128-9927-b2cc5dd58969" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056209 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="7b2bcd29-093a-439f-b3ed-e2aef1ae6904" containerName="registry-server" Feb 02 12:57:34 crc kubenswrapper[4703]: E0202 12:57:34.056343 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.056354 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="fa37052c-0d36-4b31-9e1c-59ef2bd1ff7d" containerName="marketplace-operator" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.057175 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.061253 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"certified-operators-dockercfg-4rs5g" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.067795 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mfzvq"] Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.194508 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wrjv6\" (UniqueName: \"kubernetes.io/projected/4c522108-2465-4905-9703-5dfd173bafdb-kube-api-access-wrjv6\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.194840 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-utilities\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.194996 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-catalog-content\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.296204 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wrjv6\" (UniqueName: \"kubernetes.io/projected/4c522108-2465-4905-9703-5dfd173bafdb-kube-api-access-wrjv6\") pod 
\"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.296307 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-utilities\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.296338 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-catalog-content\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.296998 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-utilities\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.297016 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-catalog-content\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.317391 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wrjv6\" (UniqueName: \"kubernetes.io/projected/4c522108-2465-4905-9703-5dfd173bafdb-kube-api-access-wrjv6\") pod \"certified-operators-mfzvq\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:34 crc kubenswrapper[4703]: I0202 12:57:34.376906 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.663324 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6h4"] Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.664993 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.668387 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-marketplace-dockercfg-x2ctb" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.680064 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6h4"] Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.807845 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-utilities\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.807942 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dz2cl\" (UniqueName: \"kubernetes.io/projected/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-kube-api-access-dz2cl\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.808084 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-catalog-content\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.909856 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-utilities\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.909943 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dz2cl\" (UniqueName: \"kubernetes.io/projected/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-kube-api-access-dz2cl\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.909993 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-catalog-content\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.910717 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-catalog-content\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.910748 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-utilities\") pod \"redhat-marketplace-kg6h4\" (UID: 
\"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.933390 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dz2cl\" (UniqueName: \"kubernetes.io/projected/3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3-kube-api-access-dz2cl\") pod \"redhat-marketplace-kg6h4\" (UID: \"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3\") " pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:34.997213 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:35.517646 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-mfzvq"] Feb 02 12:57:35 crc kubenswrapper[4703]: I0202 12:57:35.579123 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-kg6h4"] Feb 02 12:57:35 crc kubenswrapper[4703]: W0202 12:57:35.593892 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3cd4a4fe_0b9f_4f64_834a_df0a1f6a3bb3.slice/crio-a068091bfb3112404d27c1f0421d901ca7051ee1cbbc32bec2685425f11e1635 WatchSource:0}: Error finding container a068091bfb3112404d27c1f0421d901ca7051ee1cbbc32bec2685425f11e1635: Status 404 returned error can't find the container with id a068091bfb3112404d27c1f0421d901ca7051ee1cbbc32bec2685425f11e1635 Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.454298 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-m8xxx"] Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.455976 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.463726 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"community-operators-dockercfg-dmngl" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.469655 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m8xxx"] Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.508617 4703 generic.go:334] "Generic (PLEG): container finished" podID="3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3" containerID="47a3e9a996ea5816b1c71d42c53b5909866219a89d9475e0055e57df07a8954b" exitCode=0 Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.508699 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6h4" event={"ID":"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3","Type":"ContainerDied","Data":"47a3e9a996ea5816b1c71d42c53b5909866219a89d9475e0055e57df07a8954b"} Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.508754 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6h4" event={"ID":"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3","Type":"ContainerStarted","Data":"a068091bfb3112404d27c1f0421d901ca7051ee1cbbc32bec2685425f11e1635"} Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.513138 4703 generic.go:334] "Generic (PLEG): container finished" podID="4c522108-2465-4905-9703-5dfd173bafdb" containerID="ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0" exitCode=0 Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.513184 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerDied","Data":"ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0"} Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.513208 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerStarted","Data":"3cbc9061bcc60d825b5054f71a3f4ea3aa0457bc6c72a067b1611b4583969d27"} Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.536097 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a77f425-fdf2-44a5-8fac-d41dc0307d14-utilities\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.536176 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-w8m9c\" (UniqueName: \"kubernetes.io/projected/1a77f425-fdf2-44a5-8fac-d41dc0307d14-kube-api-access-w8m9c\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.536206 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a77f425-fdf2-44a5-8fac-d41dc0307d14-catalog-content\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.637993 
4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-w8m9c\" (UniqueName: \"kubernetes.io/projected/1a77f425-fdf2-44a5-8fac-d41dc0307d14-kube-api-access-w8m9c\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.638081 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a77f425-fdf2-44a5-8fac-d41dc0307d14-catalog-content\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.638151 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a77f425-fdf2-44a5-8fac-d41dc0307d14-utilities\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.638816 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1a77f425-fdf2-44a5-8fac-d41dc0307d14-catalog-content\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.638900 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1a77f425-fdf2-44a5-8fac-d41dc0307d14-utilities\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.666748 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-w8m9c\" (UniqueName: \"kubernetes.io/projected/1a77f425-fdf2-44a5-8fac-d41dc0307d14-kube-api-access-w8m9c\") pod \"community-operators-m8xxx\" (UID: \"1a77f425-fdf2-44a5-8fac-d41dc0307d14\") " pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:36 crc kubenswrapper[4703]: I0202 12:57:36.788936 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.052504 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-grsx4"] Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.054302 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.060819 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"redhat-operators-dockercfg-ct8rh" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.075370 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-grsx4"] Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.149118 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7077d1-f842-4918-83cd-cf46ec77a8b7-catalog-content\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.149192 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-2dplp\" (UniqueName: \"kubernetes.io/projected/4d7077d1-f842-4918-83cd-cf46ec77a8b7-kube-api-access-2dplp\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.149255 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7077d1-f842-4918-83cd-cf46ec77a8b7-utilities\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.209039 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-m8xxx"] Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.250613 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7077d1-f842-4918-83cd-cf46ec77a8b7-utilities\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.250713 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4d7077d1-f842-4918-83cd-cf46ec77a8b7-catalog-content\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.250754 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-2dplp\" (UniqueName: \"kubernetes.io/projected/4d7077d1-f842-4918-83cd-cf46ec77a8b7-kube-api-access-2dplp\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.251794 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4d7077d1-f842-4918-83cd-cf46ec77a8b7-utilities\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.252054 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: 
\"kubernetes.io/empty-dir/4d7077d1-f842-4918-83cd-cf46ec77a8b7-catalog-content\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.277156 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-2dplp\" (UniqueName: \"kubernetes.io/projected/4d7077d1-f842-4918-83cd-cf46ec77a8b7-kube-api-access-2dplp\") pod \"redhat-operators-grsx4\" (UID: \"4d7077d1-f842-4918-83cd-cf46ec77a8b7\") " pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.386243 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.524665 4703 generic.go:334] "Generic (PLEG): container finished" podID="3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3" containerID="5650a1c7c98b515c51d5c0eff0d732e5d4155c6abd32ffb196dd414758fdacc5" exitCode=0 Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.524787 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6h4" event={"ID":"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3","Type":"ContainerDied","Data":"5650a1c7c98b515c51d5c0eff0d732e5d4155c6abd32ffb196dd414758fdacc5"} Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.528120 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerStarted","Data":"efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30"} Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.532031 4703 generic.go:334] "Generic (PLEG): container finished" podID="1a77f425-fdf2-44a5-8fac-d41dc0307d14" containerID="70b5cc08bc2a0921332bad54f402c7f0039dc9e6469150664e962f754b9d1ec2" exitCode=0 Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.532097 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m8xxx" event={"ID":"1a77f425-fdf2-44a5-8fac-d41dc0307d14","Type":"ContainerDied","Data":"70b5cc08bc2a0921332bad54f402c7f0039dc9e6469150664e962f754b9d1ec2"} Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.532144 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m8xxx" event={"ID":"1a77f425-fdf2-44a5-8fac-d41dc0307d14","Type":"ContainerStarted","Data":"cb39ce6aefc9e5398ad27290a0d793beeca37af28891451a9b07a3674514f89b"} Feb 02 12:57:37 crc kubenswrapper[4703]: I0202 12:57:37.839615 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-grsx4"] Feb 02 12:57:37 crc kubenswrapper[4703]: W0202 12:57:37.846047 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d7077d1_f842_4918_83cd_cf46ec77a8b7.slice/crio-d5e27df9067c63196165b9cc282da9cbc76e43566f3164d070c823ea83de4e9a WatchSource:0}: Error finding container d5e27df9067c63196165b9cc282da9cbc76e43566f3164d070c823ea83de4e9a: Status 404 returned error can't find the container with id d5e27df9067c63196165b9cc282da9cbc76e43566f3164d070c823ea83de4e9a Feb 02 12:57:38 crc kubenswrapper[4703]: E0202 12:57:38.064789 4703 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: 
[\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d7077d1_f842_4918_83cd_cf46ec77a8b7.slice/crio-1cec29a27e592ffd932fdc28b6d185f47d35df709a8145be39041f099b4dddf7.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod4d7077d1_f842_4918_83cd_cf46ec77a8b7.slice/crio-conmon-1cec29a27e592ffd932fdc28b6d185f47d35df709a8145be39041f099b4dddf7.scope\": RecentStats: unable to find data in memory cache]" Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.540917 4703 generic.go:334] "Generic (PLEG): container finished" podID="1a77f425-fdf2-44a5-8fac-d41dc0307d14" containerID="52df0b36b52497fa16a33d5ef6bdb18ab03d91571886fde7672cd5c43b051f69" exitCode=0 Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.541008 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m8xxx" event={"ID":"1a77f425-fdf2-44a5-8fac-d41dc0307d14","Type":"ContainerDied","Data":"52df0b36b52497fa16a33d5ef6bdb18ab03d91571886fde7672cd5c43b051f69"} Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.544713 4703 generic.go:334] "Generic (PLEG): container finished" podID="4d7077d1-f842-4918-83cd-cf46ec77a8b7" containerID="1cec29a27e592ffd932fdc28b6d185f47d35df709a8145be39041f099b4dddf7" exitCode=0 Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.544797 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-grsx4" event={"ID":"4d7077d1-f842-4918-83cd-cf46ec77a8b7","Type":"ContainerDied","Data":"1cec29a27e592ffd932fdc28b6d185f47d35df709a8145be39041f099b4dddf7"} Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.544822 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-grsx4" event={"ID":"4d7077d1-f842-4918-83cd-cf46ec77a8b7","Type":"ContainerStarted","Data":"d5e27df9067c63196165b9cc282da9cbc76e43566f3164d070c823ea83de4e9a"} Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.555096 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-kg6h4" event={"ID":"3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3","Type":"ContainerStarted","Data":"6fe7851ca77fbe576275cf15265ca75e2fe1947f19ce79323a088fa767588059"} Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.557696 4703 generic.go:334] "Generic (PLEG): container finished" podID="4c522108-2465-4905-9703-5dfd173bafdb" containerID="efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30" exitCode=0 Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.557749 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerDied","Data":"efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30"} Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.654619 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-kg6h4" podStartSLOduration=3.2046183360000002 podStartE2EDuration="4.654604001s" podCreationTimestamp="2026-02-02 12:57:34 +0000 UTC" firstStartedPulling="2026-02-02 12:57:36.511934931 +0000 UTC m=+383.527142465" lastFinishedPulling="2026-02-02 12:57:37.961920596 +0000 UTC m=+384.977128130" observedRunningTime="2026-02-02 12:57:38.65163371 +0000 UTC m=+385.666841274" watchObservedRunningTime="2026-02-02 12:57:38.654604001 +0000 UTC m=+385.669811535" Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 
12:57:38.677810 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-image-registry/image-registry-66df7c8f76-ffrjq" Feb 02 12:57:38 crc kubenswrapper[4703]: I0202 12:57:38.744435 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wrpp2"] Feb 02 12:57:39 crc kubenswrapper[4703]: I0202 12:57:39.567510 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-m8xxx" event={"ID":"1a77f425-fdf2-44a5-8fac-d41dc0307d14","Type":"ContainerStarted","Data":"6c8aeff2f445466305c6c4cf6412b58dd7367c3f86f80dd28b7f453414f29c4a"} Feb 02 12:57:39 crc kubenswrapper[4703]: I0202 12:57:39.571630 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerStarted","Data":"97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c"} Feb 02 12:57:39 crc kubenswrapper[4703]: I0202 12:57:39.598777 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-m8xxx" podStartSLOduration=2.073635371 podStartE2EDuration="3.598748779s" podCreationTimestamp="2026-02-02 12:57:36 +0000 UTC" firstStartedPulling="2026-02-02 12:57:37.537560943 +0000 UTC m=+384.552768477" lastFinishedPulling="2026-02-02 12:57:39.062674351 +0000 UTC m=+386.077881885" observedRunningTime="2026-02-02 12:57:39.597144496 +0000 UTC m=+386.612352030" watchObservedRunningTime="2026-02-02 12:57:39.598748779 +0000 UTC m=+386.613956313" Feb 02 12:57:39 crc kubenswrapper[4703]: I0202 12:57:39.625455 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-mfzvq" podStartSLOduration=2.982250782 podStartE2EDuration="5.625421374s" podCreationTimestamp="2026-02-02 12:57:34 +0000 UTC" firstStartedPulling="2026-02-02 12:57:36.517636456 +0000 UTC m=+383.532844010" lastFinishedPulling="2026-02-02 12:57:39.160807068 +0000 UTC m=+386.176014602" observedRunningTime="2026-02-02 12:57:39.62230219 +0000 UTC m=+386.637509744" watchObservedRunningTime="2026-02-02 12:57:39.625421374 +0000 UTC m=+386.640628908" Feb 02 12:57:40 crc kubenswrapper[4703]: I0202 12:57:40.579609 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-grsx4" event={"ID":"4d7077d1-f842-4918-83cd-cf46ec77a8b7","Type":"ContainerStarted","Data":"2c3761dab839ebb7c11ae276566ac66193950d1966f16f52d384c8e6dae03ece"} Feb 02 12:57:41 crc kubenswrapper[4703]: I0202 12:57:41.588773 4703 generic.go:334] "Generic (PLEG): container finished" podID="4d7077d1-f842-4918-83cd-cf46ec77a8b7" containerID="2c3761dab839ebb7c11ae276566ac66193950d1966f16f52d384c8e6dae03ece" exitCode=0 Feb 02 12:57:41 crc kubenswrapper[4703]: I0202 12:57:41.588817 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-grsx4" event={"ID":"4d7077d1-f842-4918-83cd-cf46ec77a8b7","Type":"ContainerDied","Data":"2c3761dab839ebb7c11ae276566ac66193950d1966f16f52d384c8e6dae03ece"} Feb 02 12:57:42 crc kubenswrapper[4703]: I0202 12:57:42.597312 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-grsx4" event={"ID":"4d7077d1-f842-4918-83cd-cf46ec77a8b7","Type":"ContainerStarted","Data":"23230f54bcc7b9642083e35c7460fa4d7c1239950f737654584449b867feaf3f"} Feb 02 12:57:42 crc kubenswrapper[4703]: I0202 12:57:42.616069 4703 
pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-grsx4" podStartSLOduration=2.175593059 podStartE2EDuration="5.616049758s" podCreationTimestamp="2026-02-02 12:57:37 +0000 UTC" firstStartedPulling="2026-02-02 12:57:38.546151473 +0000 UTC m=+385.561359007" lastFinishedPulling="2026-02-02 12:57:41.986608172 +0000 UTC m=+389.001815706" observedRunningTime="2026-02-02 12:57:42.61500345 +0000 UTC m=+389.630210984" watchObservedRunningTime="2026-02-02 12:57:42.616049758 +0000 UTC m=+389.631257292" Feb 02 12:57:44 crc kubenswrapper[4703]: I0202 12:57:44.377792 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:44 crc kubenswrapper[4703]: I0202 12:57:44.378323 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:44 crc kubenswrapper[4703]: I0202 12:57:44.438505 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:44 crc kubenswrapper[4703]: I0202 12:57:44.665685 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 12:57:44 crc kubenswrapper[4703]: I0202 12:57:44.997889 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:44 crc kubenswrapper[4703]: I0202 12:57:44.997981 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:45 crc kubenswrapper[4703]: I0202 12:57:45.062479 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:45 crc kubenswrapper[4703]: I0202 12:57:45.660937 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-kg6h4" Feb 02 12:57:45 crc kubenswrapper[4703]: I0202 12:57:45.984864 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 12:57:45 crc kubenswrapper[4703]: I0202 12:57:45.984937 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 12:57:46 crc kubenswrapper[4703]: I0202 12:57:46.789467 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:46 crc kubenswrapper[4703]: I0202 12:57:46.789537 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:46 crc kubenswrapper[4703]: I0202 12:57:46.836661 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:47 crc kubenswrapper[4703]: I0202 12:57:47.387436 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:47 crc kubenswrapper[4703]: I0202 12:57:47.387501 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:47 crc kubenswrapper[4703]: I0202 12:57:47.676511 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-m8xxx" Feb 02 12:57:48 crc kubenswrapper[4703]: I0202 12:57:48.429096 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/redhat-operators-grsx4" podUID="4d7077d1-f842-4918-83cd-cf46ec77a8b7" containerName="registry-server" probeResult="failure" output=< Feb 02 12:57:48 crc kubenswrapper[4703]: timeout: failed to connect service ":50051" within 1s Feb 02 12:57:48 crc kubenswrapper[4703]: > Feb 02 12:57:57 crc kubenswrapper[4703]: I0202 12:57:57.427723 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:57:57 crc kubenswrapper[4703]: I0202 12:57:57.469512 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-grsx4" Feb 02 12:58:03 crc kubenswrapper[4703]: I0202 12:58:03.806555 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" podUID="5654372a-377f-47b2-a476-6f1a55395e6c" containerName="registry" containerID="cri-o://b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058" gracePeriod=30 Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.255455 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417217 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-trusted-ca\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417293 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-registry-tls\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417483 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-storage\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417520 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dj8dl\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-kube-api-access-dj8dl\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417568 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5654372a-377f-47b2-a476-6f1a55395e6c-installation-pull-secrets\") pod 
\"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417649 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5654372a-377f-47b2-a476-6f1a55395e6c-ca-trust-extracted\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417705 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-bound-sa-token\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.417797 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-registry-certificates\") pod \"5654372a-377f-47b2-a476-6f1a55395e6c\" (UID: \"5654372a-377f-47b2-a476-6f1a55395e6c\") " Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.418555 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-trusted-ca" (OuterVolumeSpecName: "trusted-ca") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "trusted-ca". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.418811 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-registry-certificates" (OuterVolumeSpecName: "registry-certificates") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "registry-certificates". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.428530 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/5654372a-377f-47b2-a476-6f1a55395e6c-installation-pull-secrets" (OuterVolumeSpecName: "installation-pull-secrets") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "installation-pull-secrets". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.428679 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-registry-tls" (OuterVolumeSpecName: "registry-tls") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "registry-tls". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.429222 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-bound-sa-token" (OuterVolumeSpecName: "bound-sa-token") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "bound-sa-token". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.430623 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-kube-api-access-dj8dl" (OuterVolumeSpecName: "kube-api-access-dj8dl") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "kube-api-access-dj8dl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.431675 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8" (OuterVolumeSpecName: "registry-storage") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "pvc-657094db-63f1-4ba8-9a24-edca0e80b7a8". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.435437 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5654372a-377f-47b2-a476-6f1a55395e6c-ca-trust-extracted" (OuterVolumeSpecName: "ca-trust-extracted") pod "5654372a-377f-47b2-a476-6f1a55395e6c" (UID: "5654372a-377f-47b2-a476-6f1a55395e6c"). InnerVolumeSpecName "ca-trust-extracted". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519371 4703 reconciler_common.go:293] "Volume detached for volume \"installation-pull-secrets\" (UniqueName: \"kubernetes.io/secret/5654372a-377f-47b2-a476-6f1a55395e6c-installation-pull-secrets\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519432 4703 reconciler_common.go:293] "Volume detached for volume \"ca-trust-extracted\" (UniqueName: \"kubernetes.io/empty-dir/5654372a-377f-47b2-a476-6f1a55395e6c-ca-trust-extracted\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519445 4703 reconciler_common.go:293] "Volume detached for volume \"bound-sa-token\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-bound-sa-token\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519459 4703 reconciler_common.go:293] "Volume detached for volume \"registry-certificates\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-registry-certificates\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519473 4703 reconciler_common.go:293] "Volume detached for volume \"trusted-ca\" (UniqueName: \"kubernetes.io/configmap/5654372a-377f-47b2-a476-6f1a55395e6c-trusted-ca\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519485 4703 reconciler_common.go:293] "Volume detached for volume \"registry-tls\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-registry-tls\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.519496 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dj8dl\" (UniqueName: \"kubernetes.io/projected/5654372a-377f-47b2-a476-6f1a55395e6c-kube-api-access-dj8dl\") on node \"crc\" DevicePath \"\"" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.738568 4703 generic.go:334] "Generic (PLEG): container finished" podID="5654372a-377f-47b2-a476-6f1a55395e6c" 
containerID="b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058" exitCode=0 Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.738631 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" event={"ID":"5654372a-377f-47b2-a476-6f1a55395e6c","Type":"ContainerDied","Data":"b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058"} Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.738664 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" event={"ID":"5654372a-377f-47b2-a476-6f1a55395e6c","Type":"ContainerDied","Data":"b5a45dbf12f7fc27e4b85090f5118017fae787bd11378eea4da1eb8b68b8f670"} Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.738684 4703 scope.go:117] "RemoveContainer" containerID="b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.739015 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-image-registry/image-registry-697d97f7c8-wrpp2" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.761753 4703 scope.go:117] "RemoveContainer" containerID="b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058" Feb 02 12:58:04 crc kubenswrapper[4703]: E0202 12:58:04.762762 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058\": container with ID starting with b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058 not found: ID does not exist" containerID="b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.762825 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058"} err="failed to get container status \"b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058\": rpc error: code = NotFound desc = could not find container \"b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058\": container with ID starting with b161a6c74b4ded6a6bb588b066bc67a10f147c16409b590a98abfb32f8d8d058 not found: ID does not exist" Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.781043 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wrpp2"] Feb 02 12:58:04 crc kubenswrapper[4703]: I0202 12:58:04.785704 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-image-registry/image-registry-697d97f7c8-wrpp2"] Feb 02 12:58:05 crc kubenswrapper[4703]: I0202 12:58:05.943695 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5654372a-377f-47b2-a476-6f1a55395e6c" path="/var/lib/kubelet/pods/5654372a-377f-47b2-a476-6f1a55395e6c/volumes" Feb 02 12:58:15 crc kubenswrapper[4703]: I0202 12:58:15.985434 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 12:58:15 crc kubenswrapper[4703]: I0202 12:58:15.985983 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" 
podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 12:58:15 crc kubenswrapper[4703]: I0202 12:58:15.986041 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 12:58:15 crc kubenswrapper[4703]: I0202 12:58:15.986715 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"75d8d8d4aca4ff712a827b14263bdea2a7cde424db43d84a36afba035ca666b3"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 12:58:15 crc kubenswrapper[4703]: I0202 12:58:15.986778 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://75d8d8d4aca4ff712a827b14263bdea2a7cde424db43d84a36afba035ca666b3" gracePeriod=600 Feb 02 12:58:16 crc kubenswrapper[4703]: I0202 12:58:16.844978 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="75d8d8d4aca4ff712a827b14263bdea2a7cde424db43d84a36afba035ca666b3" exitCode=0 Feb 02 12:58:16 crc kubenswrapper[4703]: I0202 12:58:16.845079 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"75d8d8d4aca4ff712a827b14263bdea2a7cde424db43d84a36afba035ca666b3"} Feb 02 12:58:16 crc kubenswrapper[4703]: I0202 12:58:16.845403 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"6feb20f2143784f14dcb3a19aeabc7b847878d2ce98f7eb53b072d47f5874e87"} Feb 02 12:58:16 crc kubenswrapper[4703]: I0202 12:58:16.845431 4703 scope.go:117] "RemoveContainer" containerID="b4fef46297aa5f1de3f2a68699989ebf3089ccfd15c7a79039071d61fa6b6d96" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.190639 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc"] Feb 02 13:00:00 crc kubenswrapper[4703]: E0202 13:00:00.191840 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5654372a-377f-47b2-a476-6f1a55395e6c" containerName="registry" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.191855 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="5654372a-377f-47b2-a476-6f1a55395e6c" containerName="registry" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.191962 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="5654372a-377f-47b2-a476-6f1a55395e6c" containerName="registry" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.193839 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.197589 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc"] Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.199381 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.199870 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.350207 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/21235aea-3815-4e56-b0cd-46cd1263bac4-secret-volume\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.351054 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-txlfl\" (UniqueName: \"kubernetes.io/projected/21235aea-3815-4e56-b0cd-46cd1263bac4-kube-api-access-txlfl\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.351140 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/21235aea-3815-4e56-b0cd-46cd1263bac4-config-volume\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.457326 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/21235aea-3815-4e56-b0cd-46cd1263bac4-secret-volume\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.457549 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-txlfl\" (UniqueName: \"kubernetes.io/projected/21235aea-3815-4e56-b0cd-46cd1263bac4-kube-api-access-txlfl\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.457630 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/21235aea-3815-4e56-b0cd-46cd1263bac4-config-volume\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.459176 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/21235aea-3815-4e56-b0cd-46cd1263bac4-config-volume\") pod 
\"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.469467 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/21235aea-3815-4e56-b0cd-46cd1263bac4-secret-volume\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.479256 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-txlfl\" (UniqueName: \"kubernetes.io/projected/21235aea-3815-4e56-b0cd-46cd1263bac4-kube-api-access-txlfl\") pod \"collect-profiles-29500620-n6ptc\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.520045 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:00 crc kubenswrapper[4703]: I0202 13:00:00.779102 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc"] Feb 02 13:00:01 crc kubenswrapper[4703]: I0202 13:00:01.601360 4703 generic.go:334] "Generic (PLEG): container finished" podID="21235aea-3815-4e56-b0cd-46cd1263bac4" containerID="b30f5223c584bb2f96b94eb5609e0b7575a7b3df59c5e67b28f5d7b174e3bad2" exitCode=0 Feb 02 13:00:01 crc kubenswrapper[4703]: I0202 13:00:01.601441 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" event={"ID":"21235aea-3815-4e56-b0cd-46cd1263bac4","Type":"ContainerDied","Data":"b30f5223c584bb2f96b94eb5609e0b7575a7b3df59c5e67b28f5d7b174e3bad2"} Feb 02 13:00:01 crc kubenswrapper[4703]: I0202 13:00:01.601493 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" event={"ID":"21235aea-3815-4e56-b0cd-46cd1263bac4","Type":"ContainerStarted","Data":"4391aa3d064e1eff5627e6b042a537e93ae92c1e6f195ed4a17652db9ca56114"} Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.879353 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.905762 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-txlfl\" (UniqueName: \"kubernetes.io/projected/21235aea-3815-4e56-b0cd-46cd1263bac4-kube-api-access-txlfl\") pod \"21235aea-3815-4e56-b0cd-46cd1263bac4\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.905879 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/21235aea-3815-4e56-b0cd-46cd1263bac4-config-volume\") pod \"21235aea-3815-4e56-b0cd-46cd1263bac4\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.906010 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/21235aea-3815-4e56-b0cd-46cd1263bac4-secret-volume\") pod \"21235aea-3815-4e56-b0cd-46cd1263bac4\" (UID: \"21235aea-3815-4e56-b0cd-46cd1263bac4\") " Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.922168 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/21235aea-3815-4e56-b0cd-46cd1263bac4-config-volume" (OuterVolumeSpecName: "config-volume") pod "21235aea-3815-4e56-b0cd-46cd1263bac4" (UID: "21235aea-3815-4e56-b0cd-46cd1263bac4"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.923722 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/21235aea-3815-4e56-b0cd-46cd1263bac4-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "21235aea-3815-4e56-b0cd-46cd1263bac4" (UID: "21235aea-3815-4e56-b0cd-46cd1263bac4"). InnerVolumeSpecName "secret-volume". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:00:02 crc kubenswrapper[4703]: I0202 13:00:02.923837 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/21235aea-3815-4e56-b0cd-46cd1263bac4-kube-api-access-txlfl" (OuterVolumeSpecName: "kube-api-access-txlfl") pod "21235aea-3815-4e56-b0cd-46cd1263bac4" (UID: "21235aea-3815-4e56-b0cd-46cd1263bac4"). InnerVolumeSpecName "kube-api-access-txlfl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:00:03 crc kubenswrapper[4703]: I0202 13:00:03.007647 4703 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/21235aea-3815-4e56-b0cd-46cd1263bac4-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 13:00:03 crc kubenswrapper[4703]: I0202 13:00:03.007704 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-txlfl\" (UniqueName: \"kubernetes.io/projected/21235aea-3815-4e56-b0cd-46cd1263bac4-kube-api-access-txlfl\") on node \"crc\" DevicePath \"\"" Feb 02 13:00:03 crc kubenswrapper[4703]: I0202 13:00:03.007716 4703 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/21235aea-3815-4e56-b0cd-46cd1263bac4-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 13:00:03 crc kubenswrapper[4703]: I0202 13:00:03.625698 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" event={"ID":"21235aea-3815-4e56-b0cd-46cd1263bac4","Type":"ContainerDied","Data":"4391aa3d064e1eff5627e6b042a537e93ae92c1e6f195ed4a17652db9ca56114"} Feb 02 13:00:03 crc kubenswrapper[4703]: I0202 13:00:03.625770 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="4391aa3d064e1eff5627e6b042a537e93ae92c1e6f195ed4a17652db9ca56114" Feb 02 13:00:03 crc kubenswrapper[4703]: I0202 13:00:03.625845 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500620-n6ptc" Feb 02 13:00:45 crc kubenswrapper[4703]: I0202 13:00:45.984904 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:00:45 crc kubenswrapper[4703]: I0202 13:00:45.985962 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:01:15 crc kubenswrapper[4703]: I0202 13:01:15.985126 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:01:15 crc kubenswrapper[4703]: I0202 13:01:15.986062 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:01:45 crc kubenswrapper[4703]: I0202 13:01:45.985351 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:01:45 crc kubenswrapper[4703]: I0202 
Feb 02 13:01:45 crc kubenswrapper[4703]: I0202 13:01:45.985926 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 13:01:45 crc kubenswrapper[4703]: I0202 13:01:45.985983 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs"
Feb 02 13:01:45 crc kubenswrapper[4703]: I0202 13:01:45.986548 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6feb20f2143784f14dcb3a19aeabc7b847878d2ce98f7eb53b072d47f5874e87"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 13:01:45 crc kubenswrapper[4703]: I0202 13:01:45.986609 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://6feb20f2143784f14dcb3a19aeabc7b847878d2ce98f7eb53b072d47f5874e87" gracePeriod=600
Feb 02 13:01:46 crc kubenswrapper[4703]: I0202 13:01:46.336266 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="6feb20f2143784f14dcb3a19aeabc7b847878d2ce98f7eb53b072d47f5874e87" exitCode=0
Feb 02 13:01:46 crc kubenswrapper[4703]: I0202 13:01:46.336355 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"6feb20f2143784f14dcb3a19aeabc7b847878d2ce98f7eb53b072d47f5874e87"}
Feb 02 13:01:46 crc kubenswrapper[4703]: I0202 13:01:46.336734 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"7989a84887b494a315910cd56de11e22a0e359c557f3cdff1b1ca6e34517f165"}
Feb 02 13:01:46 crc kubenswrapper[4703]: I0202 13:01:46.336762 4703 scope.go:117] "RemoveContainer" containerID="75d8d8d4aca4ff712a827b14263bdea2a7cde424db43d84a36afba035ca666b3"
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.993551 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j8d97"]
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.995795 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-controller" containerID="cri-o://b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4" gracePeriod=30
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.996123 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="northd" containerID="cri-o://055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42" gracePeriod=30
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.996904 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="sbdb" containerID="cri-o://a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691" gracePeriod=30
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.997034 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-node" containerID="cri-o://0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7" gracePeriod=30
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.997143 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="nbdb" containerID="cri-o://91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639" gracePeriod=30
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.997451 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-acl-logging" containerID="cri-o://040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1" gracePeriod=30
Feb 02 13:02:59 crc kubenswrapper[4703]: I0202 13:02:59.997858 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-ovn-metrics" containerID="cri-o://bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9" gracePeriod=30
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.052626 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" containerID="cri-o://b63028c39d04964c40d876b91d5d5da77f2d4eebcfa357f0fc254c3a188aaa63" gracePeriod=30
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.834696 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovnkube-controller/3.log"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.837547 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovn-acl-logging/0.log"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838010 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovn-controller/0.log"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838477 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="b63028c39d04964c40d876b91d5d5da77f2d4eebcfa357f0fc254c3a188aaa63" exitCode=0
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838518 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691" exitCode=0
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838538 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639" exitCode=0
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838550 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42" exitCode=0
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838561 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9" exitCode=0
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838569 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7" exitCode=0
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838577 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1" exitCode=143
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838586 4703 generic.go:334] "Generic (PLEG): container finished" podID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerID="b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4" exitCode=143
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838641 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"b63028c39d04964c40d876b91d5d5da77f2d4eebcfa357f0fc254c3a188aaa63"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838684 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838703 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838715 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838725 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838737 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838749 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838762 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.838784 4703 scope.go:117] "RemoveContainer" containerID="f3a7ef5ec407948d87abe12f104e1b4469ff7255d89123019161c31be2d255f6"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.842056 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/2.log"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.842666 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/1.log"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.842749 4703 generic.go:334] "Generic (PLEG): container finished" podID="5fe22056-9a8b-4eba-8776-c50531078e2f" containerID="6aa00aa209344e8c7103bb10bca8bb794141dbab88fce9586534f2cc8e58df6c" exitCode=2
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.842816 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerDied","Data":"6aa00aa209344e8c7103bb10bca8bb794141dbab88fce9586534f2cc8e58df6c"}
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.843651 4703 scope.go:117] "RemoveContainer" containerID="6aa00aa209344e8c7103bb10bca8bb794141dbab88fce9586534f2cc8e58df6c"
Feb 02 13:03:00 crc kubenswrapper[4703]: E0202 13:03:00.844221 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8vjml_openshift-multus(5fe22056-9a8b-4eba-8776-c50531078e2f)\"" pod="openshift-multus/multus-8vjml" podUID="5fe22056-9a8b-4eba-8776-c50531078e2f"
Feb 02 13:03:00 crc kubenswrapper[4703]: I0202 13:03:00.909098 4703 scope.go:117] "RemoveContainer" containerID="9b3a35a5e5c14ab138c05351248758c9929dc5ab37c62119f506e0df9ba4d705"
Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.347587 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovn-acl-logging/0.log"
Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.349163 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovn-controller/0.log"
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406389 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-qq8d8"] Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406688 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406707 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406721 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406733 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406750 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="sbdb" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406757 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="sbdb" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406771 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-node" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406781 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-node" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406793 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406801 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406811 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406819 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406830 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406838 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406852 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406859 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406871 4703 cpu_manager.go:410] 
"RemoveStaleState: removing container" podUID="21235aea-3815-4e56-b0cd-46cd1263bac4" containerName="collect-profiles" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406880 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="21235aea-3815-4e56-b0cd-46cd1263bac4" containerName="collect-profiles" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406895 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="nbdb" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406903 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="nbdb" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406914 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-acl-logging" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406921 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-acl-logging" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406933 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kubecfg-setup" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406941 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kubecfg-setup" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.406954 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="northd" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.406962 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="northd" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407096 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-node" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407112 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="kube-rbac-proxy-ovn-metrics" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407125 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407136 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="sbdb" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407146 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="21235aea-3815-4e56-b0cd-46cd1263bac4" containerName="collect-profiles" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407159 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407168 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovn-acl-logging" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407179 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407189 
4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407200 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407210 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="northd" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407220 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="nbdb" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407231 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: E0202 13:03:01.407407 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.407420 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" containerName="ovnkube-controller" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.410654 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.490812 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-node-log\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491346 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-slash\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491522 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-etc-openvswitch\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491260 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-node-log" (OuterVolumeSpecName: "node-log") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "node-log". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491598 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-slash" (OuterVolumeSpecName: "host-slash") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-slash". 
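Before the replacement pod ovnkube-node-qq8d8 is admitted, the CPU and memory managers sweep per-(podUID, containerName) state left behind by deleted pods; the repeated ovnkube-controller entries correspond to one stale record per earlier restart of that container. A generic sketch of such a sweep over a map-backed store (the kubelet's real state store and checkpointing differ):

```go
package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops resource-manager assignments for containers
// whose pods are no longer active: the pattern behind the
// "RemoveStaleState: removing container" / "Deleted CPUSet assignment"
// pairs above, not the kubelet's actual implementation.
func removeStaleState(assignments map[key][]int, activePods map[string]bool) {
	for k := range assignments {
		if !activePods[k.podUID] {
			fmt.Printf("removing stale state for %s/%s\n", k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key][]int{
		{"40400eeb-f9bd-4816-b65f-a25b0c3d021e", "ovnkube-controller"}: {0, 1},
		{"9ceff25c-8747-4e01-9211-de9cef72ac03", "ovnkube-controller"}: {2, 3},
	}
	activePods := map[string]bool{"9ceff25c-8747-4e01-9211-de9cef72ac03": true}
	removeStaleState(assignments, activePods)
	fmt.Println("remaining:", len(assignments)) // 1
}
```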
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491644 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-etc-openvswitch" (OuterVolumeSpecName: "etc-openvswitch") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "etc-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491664 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-bin\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491847 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-systemd\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491872 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-openvswitch\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491888 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-systemd-units\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491907 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-var-lib-openvswitch\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491926 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-log-socket\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491976 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-netd\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.491998 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-netns\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492018 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run-ovn\" (UniqueName: 
\"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-ovn\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492043 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-env-overrides\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492072 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-ovn-kubernetes\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492111 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-var-lib-cni-networks-ovn-kubernetes\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492128 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sg6fs\" (UniqueName: \"kubernetes.io/projected/40400eeb-f9bd-4816-b65f-a25b0c3d021e-kube-api-access-sg6fs\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492147 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-kubelet\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492169 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-config\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492214 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-script-lib\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492234 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovn-node-metrics-cert\") pod \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\" (UID: \"40400eeb-f9bd-4816-b65f-a25b0c3d021e\") " Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492410 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-var-lib-openvswitch" (OuterVolumeSpecName: "var-lib-openvswitch") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "var-lib-openvswitch". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492642 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-openvswitch" (OuterVolumeSpecName: "run-openvswitch") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "run-openvswitch". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492675 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-netd" (OuterVolumeSpecName: "host-cni-netd") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-cni-netd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492707 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-ovn-kubernetes" (OuterVolumeSpecName: "host-run-ovn-kubernetes") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-run-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492749 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-netns" (OuterVolumeSpecName: "host-run-netns") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-run-netns". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492765 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-log-socket" (OuterVolumeSpecName: "log-socket") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "log-socket". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492789 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-systemd-units" (OuterVolumeSpecName: "systemd-units") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "systemd-units". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492797 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-kubelet" (OuterVolumeSpecName: "host-kubelet") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-kubelet". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492762 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-var-lib-cni-networks-ovn-kubernetes" (OuterVolumeSpecName: "host-var-lib-cni-networks-ovn-kubernetes") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-var-lib-cni-networks-ovn-kubernetes". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493082 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-env-overrides" (OuterVolumeSpecName: "env-overrides") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "env-overrides". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493151 4703 reconciler_common.go:293] "Volume detached for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-node-log\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493176 4703 reconciler_common.go:293] "Volume detached for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-slash\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493189 4703 reconciler_common.go:293] "Volume detached for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-etc-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493203 4703 reconciler_common.go:293] "Volume detached for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-var-lib-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493216 4703 reconciler_common.go:293] "Volume detached for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-openvswitch\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493232 4703 reconciler_common.go:293] "Volume detached for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-systemd-units\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493246 4703 reconciler_common.go:293] "Volume detached for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-log-socket\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493260 4703 reconciler_common.go:293] "Volume detached for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-netd\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493382 4703 reconciler_common.go:293] "Volume detached for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-netns\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493404 4703 reconciler_common.go:293] "Volume detached for volume 
\"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-run-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493418 4703 reconciler_common.go:293] "Volume detached for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-var-lib-cni-networks-ovn-kubernetes\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493432 4703 reconciler_common.go:293] "Volume detached for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-kubelet\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.493165 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-script-lib" (OuterVolumeSpecName: "ovnkube-script-lib") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "ovnkube-script-lib". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.494446 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-bin" (OuterVolumeSpecName: "host-cni-bin") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "host-cni-bin". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.494614 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-config" (OuterVolumeSpecName: "ovnkube-config") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "ovnkube-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.492376 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-ovn" (OuterVolumeSpecName: "run-ovn") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "run-ovn". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.500387 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/40400eeb-f9bd-4816-b65f-a25b0c3d021e-kube-api-access-sg6fs" (OuterVolumeSpecName: "kube-api-access-sg6fs") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "kube-api-access-sg6fs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.506977 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovn-node-metrics-cert" (OuterVolumeSpecName: "ovn-node-metrics-cert") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "ovn-node-metrics-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.508469 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-systemd" (OuterVolumeSpecName: "run-systemd") pod "40400eeb-f9bd-4816-b65f-a25b0c3d021e" (UID: "40400eeb-f9bd-4816-b65f-a25b0c3d021e"). InnerVolumeSpecName "run-systemd". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.594567 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.595159 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-cni-netd\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.595314 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.595534 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-node-log\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.595647 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-slash\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.595804 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9ceff25c-8747-4e01-9211-de9cef72ac03-ovn-node-metrics-cert\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.595962 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-ovn\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596053 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log-socket\" (UniqueName: 
\"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-log-socket\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596138 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-etc-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596208 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-ovnkube-config\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596296 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-run-ovn-kubernetes\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596375 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-run-netns\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596466 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-cni-bin\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596543 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-systemd-units\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596629 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-var-lib-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596715 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-systemd\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596791 4703 reconciler_common.go:245] 
"operationExecutor.VerifyControllerAttachedVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-kubelet\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596858 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-ovnkube-script-lib\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.596948 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-env-overrides\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597041 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-q7r2b\" (UniqueName: \"kubernetes.io/projected/9ceff25c-8747-4e01-9211-de9cef72ac03-kube-api-access-q7r2b\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597141 4703 reconciler_common.go:293] "Volume detached for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-env-overrides\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597195 4703 reconciler_common.go:293] "Volume detached for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-ovn\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597255 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sg6fs\" (UniqueName: \"kubernetes.io/projected/40400eeb-f9bd-4816-b65f-a25b0c3d021e-kube-api-access-sg6fs\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597447 4703 reconciler_common.go:293] "Volume detached for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-config\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597541 4703 reconciler_common.go:293] "Volume detached for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovnkube-script-lib\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597637 4703 reconciler_common.go:293] "Volume detached for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/40400eeb-f9bd-4816-b65f-a25b0c3d021e-ovn-node-metrics-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597719 4703 reconciler_common.go:293] "Volume detached for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-host-cni-bin\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.597984 4703 reconciler_common.go:293] "Volume detached for volume 
\"run-systemd\" (UniqueName: \"kubernetes.io/host-path/40400eeb-f9bd-4816-b65f-a25b0c3d021e-run-systemd\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699438 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-kubelet\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699514 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-ovnkube-script-lib\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699544 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-kubelet\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-kubelet\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699554 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-env-overrides\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699623 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-q7r2b\" (UniqueName: \"kubernetes.io/projected/9ceff25c-8747-4e01-9211-de9cef72ac03-kube-api-access-q7r2b\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699653 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699680 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-cni-netd\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699716 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699745 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-node-log\") pod \"ovnkube-node-qq8d8\" (UID: 
\"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699763 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-slash\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699808 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9ceff25c-8747-4e01-9211-de9cef72ac03-ovn-node-metrics-cert\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699829 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-ovn\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699845 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-log-socket\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699867 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-etc-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699857 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-netd\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-cni-netd\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699885 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-ovnkube-config\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.699992 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-run-ovn-kubernetes\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700035 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-cni-bin\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 
crc kubenswrapper[4703]: I0202 13:03:01.700062 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-run-netns\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700100 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-systemd-units\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700547 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-netns\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-run-netns\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700629 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-cni-bin\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-cni-bin\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700679 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log-socket\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-log-socket\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700710 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-var-lib-cni-networks-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-var-lib-cni-networks-ovn-kubernetes\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700918 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-ovn\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-ovn\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700944 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"systemd-units\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-systemd-units\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700982 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-etc-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700973 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"run-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.701021 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-slash\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-slash\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.701109 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-var-lib-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700940 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"node-log\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-node-log\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.702022 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"env-overrides\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-env-overrides\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.702257 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-script-lib\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-ovnkube-script-lib\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700129 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-openvswitch\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-var-lib-openvswitch\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.700541 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"host-run-ovn-kubernetes\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-host-run-ovn-kubernetes\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.702577 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-systemd\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.702703 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run-systemd\" (UniqueName: \"kubernetes.io/host-path/9ceff25c-8747-4e01-9211-de9cef72ac03-run-systemd\") pod \"ovnkube-node-qq8d8\" 
(UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.702971 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovnkube-config\" (UniqueName: \"kubernetes.io/configmap/9ceff25c-8747-4e01-9211-de9cef72ac03-ovnkube-config\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.713045 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ovn-node-metrics-cert\" (UniqueName: \"kubernetes.io/secret/9ceff25c-8747-4e01-9211-de9cef72ac03-ovn-node-metrics-cert\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.720319 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-q7r2b\" (UniqueName: \"kubernetes.io/projected/9ceff25c-8747-4e01-9211-de9cef72ac03-kube-api-access-q7r2b\") pod \"ovnkube-node-qq8d8\" (UID: \"9ceff25c-8747-4e01-9211-de9cef72ac03\") " pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.734305 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.856169 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"b92e8f40a40009105b7052fdd8b28a7fbe5ccef7e73e7461043c4df4540302d2"} Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.860192 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovn-acl-logging/0.log" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.860856 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-ovn-kubernetes_ovnkube-node-j8d97_40400eeb-f9bd-4816-b65f-a25b0c3d021e/ovn-controller/0.log" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.862196 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" event={"ID":"40400eeb-f9bd-4816-b65f-a25b0c3d021e","Type":"ContainerDied","Data":"ef67a9b05fafb34ba54381820a283ba99313f3c93c90327c3276f4355ddb87ef"} Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.862334 4703 scope.go:117] "RemoveContainer" containerID="b63028c39d04964c40d876b91d5d5da77f2d4eebcfa357f0fc254c3a188aaa63" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.862338 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-ovn-kubernetes/ovnkube-node-j8d97" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.864127 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/2.log" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.887223 4703 scope.go:117] "RemoveContainer" containerID="a99440b0ed030df3d48bcea482289a8063b8ebe9084eb7b390760bbb354d9691" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.907651 4703 scope.go:117] "RemoveContainer" containerID="91b64232d44924e689cc187ec38f809893b012e45e508fb3d0c46295cd9b3639" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.908179 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j8d97"] Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.912148 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-ovn-kubernetes/ovnkube-node-j8d97"] Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.923759 4703 scope.go:117] "RemoveContainer" containerID="055e6158c400c6fb2c2b7a65cec58755d818ebcd59528eac7057c91ba2bafc42" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.938175 4703 scope.go:117] "RemoveContainer" containerID="bb385cfeeaa788803d490063821c991619ab7e31c1c085ef8c49c9f856d8d5a9" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.940535 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="40400eeb-f9bd-4816-b65f-a25b0c3d021e" path="/var/lib/kubelet/pods/40400eeb-f9bd-4816-b65f-a25b0c3d021e/volumes" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.955161 4703 scope.go:117] "RemoveContainer" containerID="0ca553813db0f871c12499b4c2fc08598bde3a47ed0b5a2e0c06b9bd5a755bd7" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.971094 4703 scope.go:117] "RemoveContainer" containerID="040f1435aecaaa128eddf20383e1d667775be0465fd6db5fdd2495eaad870ff1" Feb 02 13:03:01 crc kubenswrapper[4703]: I0202 13:03:01.987114 4703 scope.go:117] "RemoveContainer" containerID="b662084949177dd7d76e6ea141c9f000e8f5abbd167a2a3cec8333836d3590b4" Feb 02 13:03:02 crc kubenswrapper[4703]: I0202 13:03:02.003369 4703 scope.go:117] "RemoveContainer" containerID="ca25b4d5762992612d259a2508d6c95d21f4a55111a192307905c3827b061737" Feb 02 13:03:02 crc kubenswrapper[4703]: I0202 13:03:02.870717 4703 generic.go:334] "Generic (PLEG): container finished" podID="9ceff25c-8747-4e01-9211-de9cef72ac03" containerID="3e735041a2d23e2928d4ac15046192bd452c100bf04146541a103e3bd0626dea" exitCode=0 Feb 02 13:03:02 crc kubenswrapper[4703]: I0202 13:03:02.870818 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerDied","Data":"3e735041a2d23e2928d4ac15046192bd452c100bf04146541a103e3bd0626dea"} Feb 02 13:03:03 crc kubenswrapper[4703]: I0202 13:03:03.879780 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"c86d64ec9292824f08f34c079dd75f9db1c264a0178626a6240ec93b030f148e"} Feb 02 13:03:03 crc kubenswrapper[4703]: I0202 13:03:03.880340 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"f502e6d7889d46f2d7a28854986208af9bb9f3a0ba56687f8f68257652d88ab1"} Feb 02 13:03:03 crc 
kubenswrapper[4703]: I0202 13:03:03.880356 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"405bd5ffc8a826150b81692b71db52f235ab61bb27c749ac4fc88bc11255e23a"} Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.661019 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"] Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.662939 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.665226 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-marketplace"/"default-dockercfg-vmwhc" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.847027 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.847101 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.847139 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-m494s\" (UniqueName: \"kubernetes.io/projected/d06ca331-ef2a-42ae-a81d-286ae08693e5-kube-api-access-m494s\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.888877 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"9d12b1dc5d744926e9b91ae39ceee7a8c4f07be95dbf526f8320157f60c2d0b6"} Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.888932 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"71c737a66572cd96957b1da333bebd1c6f18662683e33e194e86f64c10fccd39"} Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.948516 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.948603 4703 reconciler_common.go:218] 
"operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.948661 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-m494s\" (UniqueName: \"kubernetes.io/projected/d06ca331-ef2a-42ae-a81d-286ae08693e5-kube-api-access-m494s\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.949185 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-util\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.949295 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-bundle\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:04.981380 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-m494s\" (UniqueName: \"kubernetes.io/projected/d06ca331-ef2a-42ae-a81d-286ae08693e5-kube-api-access-m494s\") pod \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:05.280444 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: E0202 13:03:05.346008 4703 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(28ed5645604f087590b2168c3c6d117db935f5a82f8830be83692acecdd9b25a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" Feb 02 13:03:05 crc kubenswrapper[4703]: E0202 13:03:05.346127 4703 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(28ed5645604f087590b2168c3c6d117db935f5a82f8830be83692acecdd9b25a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: E0202 13:03:05.346161 4703 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(28ed5645604f087590b2168c3c6d117db935f5a82f8830be83692acecdd9b25a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:05 crc kubenswrapper[4703]: E0202 13:03:05.346244 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace(d06ca331-ef2a-42ae-a81d-286ae08693e5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace(d06ca331-ef2a-42ae-a81d-286ae08693e5)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(28ed5645604f087590b2168c3c6d117db935f5a82f8830be83692acecdd9b25a): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" Feb 02 13:03:05 crc kubenswrapper[4703]: I0202 13:03:05.904006 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"f10d2f477f109b35c5e27e830556e513d6759143114c4c999d7f3585ed4239e5"} Feb 02 13:03:07 crc kubenswrapper[4703]: I0202 13:03:07.920565 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"fb70e9d97d96c529c3a2fa2a9cb335b941d7793df440cfea7d11631d20b48f42"} Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.883221 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"] Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.884338 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.885041 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:10 crc kubenswrapper[4703]: E0202 13:03:10.911363 4703 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(17b452f57b200cd9d51617a904ffe61c8fb319be8a7cbf9ba5b7a1d2fd90a324): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" 
Feb 02 13:03:10 crc kubenswrapper[4703]: E0202 13:03:10.911465 4703 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(17b452f57b200cd9d51617a904ffe61c8fb319be8a7cbf9ba5b7a1d2fd90a324): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"
Feb 02 13:03:10 crc kubenswrapper[4703]: E0202 13:03:10.911506 4703 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(17b452f57b200cd9d51617a904ffe61c8fb319be8a7cbf9ba5b7a1d2fd90a324): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"
Feb 02 13:03:10 crc kubenswrapper[4703]: E0202 13:03:10.911594 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace(d06ca331-ef2a-42ae-a81d-286ae08693e5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace(d06ca331-ef2a-42ae-a81d-286ae08693e5)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(17b452f57b200cd9d51617a904ffe61c8fb319be8a7cbf9ba5b7a1d2fd90a324): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5"
Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.947763 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" event={"ID":"9ceff25c-8747-4e01-9211-de9cef72ac03","Type":"ContainerStarted","Data":"b64fc292d69080e0f315a4c48a2ad03ade70f9f86d996e30a87464528c20cadd"}
Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.948024 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8"
Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.948318 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8"
Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.976042 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8"
Feb 02 13:03:10 crc kubenswrapper[4703]: I0202 13:03:10.981705 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" podStartSLOduration=9.981658174 podStartE2EDuration="9.981658174s" podCreationTimestamp="2026-02-02 13:03:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:03:10.976327831 +0000 UTC m=+717.991535365" watchObservedRunningTime="2026-02-02 13:03:10.981658174 +0000 UTC m=+717.996865718"
Feb 02 13:03:11 crc kubenswrapper[4703]: I0202 13:03:11.734866 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8"
Feb 02 13:03:11 crc kubenswrapper[4703]: I0202 13:03:11.766366 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8"
Feb 02 13:03:13 crc kubenswrapper[4703]: I0202 13:03:13.936999 4703 scope.go:117] "RemoveContainer" containerID="6aa00aa209344e8c7103bb10bca8bb794141dbab88fce9586534f2cc8e58df6c"
Feb 02 13:03:13 crc kubenswrapper[4703]: E0202 13:03:13.937456 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"kube-multus\" with CrashLoopBackOff: \"back-off 20s restarting failed container=kube-multus pod=multus-8vjml_openshift-multus(5fe22056-9a8b-4eba-8776-c50531078e2f)\"" pod="openshift-multus/multus-8vjml" podUID="5fe22056-9a8b-4eba-8776-c50531078e2f"
Feb 02 13:03:22 crc kubenswrapper[4703]: I0202 13:03:22.933004 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"
Feb 02 13:03:22 crc kubenswrapper[4703]: I0202 13:03:22.935023 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"
Feb 02 13:03:22 crc kubenswrapper[4703]: E0202 13:03:22.963219 4703 log.go:32] "RunPodSandbox from runtime service failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(8eb420e27a17ee91a7a66a1b640f23069eb177883a576d068e81f8a8fa4a9259): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?"
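The failed sandbox is only half of the picture above: the replacement ovnkube-node-qq8d8 pod goes Ready at 13:03:10 (podStartSLOduration of about 9.98s), while kube-multus sits in CrashLoopBackOff, the kubelet refusing to restart it until its back-off window expires. That window doubles with each consecutive crash; the sketch below assumes the kubelet's commonly cited defaults of a 10-second initial delay capped at 5 minutes (neither constant appears in this log, which shows only the 20-second step), and is an illustration rather than kubelet source.

```go
// Illustration only, not kubelet code: the doubling restart back-off behind
// messages like "back-off 20s restarting failed container". The 10s base and
// 5m cap are assumed defaults; the log above shows only the 20s step.
package main

import (
	"fmt"
	"time"
)

func main() {
	const initialBackoff = 10 * time.Second // assumed base delay
	const maxBackoff = 5 * time.Minute      // assumed cap

	delay := initialBackoff
	for failure := 1; failure <= 8; failure++ {
		fmt.Printf("after crash %d: refuse restarts for %s\n", failure, delay)
		delay *= 2 // doubling, clamped at the cap
		if delay > maxBackoff {
			delay = maxBackoff
		}
	}
	// Prints 10s, 20s, 40s, 1m20s, 2m40s, then 5m0s repeatedly; the
	// "back-off 20s" record above is the second step of this schedule.
}
```

Consistent with a short window, the records that follow show the kube-multus container being recreated at 13:03:27–13:03:29 (container 6e740ef5…), after which a CNI configuration becomes available and the marketplace pod's sandbox dad0f18d… finally starts at 13:03:37.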
Feb 02 13:03:22 crc kubenswrapper[4703]: E0202 13:03:22.963977 4703 kuberuntime_sandbox.go:72] "Failed to create sandbox for pod" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(8eb420e27a17ee91a7a66a1b640f23069eb177883a576d068e81f8a8fa4a9259): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:22 crc kubenswrapper[4703]: E0202 13:03:22.964019 4703 kuberuntime_manager.go:1170] "CreatePodSandbox for pod failed" err="rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(8eb420e27a17ee91a7a66a1b640f23069eb177883a576d068e81f8a8fa4a9259): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:22 crc kubenswrapper[4703]: E0202 13:03:22.964082 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"CreatePodSandbox\" for \"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace(d06ca331-ef2a-42ae-a81d-286ae08693e5)\" with CreatePodSandboxError: \"Failed to create sandbox for pod \\\"270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace(d06ca331-ef2a-42ae-a81d-286ae08693e5)\\\": rpc error: code = Unknown desc = failed to create pod network sandbox k8s_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_openshift-marketplace_d06ca331-ef2a-42ae-a81d-286ae08693e5_0(8eb420e27a17ee91a7a66a1b640f23069eb177883a576d068e81f8a8fa4a9259): no CNI configuration file in /etc/kubernetes/cni/net.d/. Has your network provider started?\"" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" Feb 02 13:03:27 crc kubenswrapper[4703]: I0202 13:03:27.934916 4703 scope.go:117] "RemoveContainer" containerID="6aa00aa209344e8c7103bb10bca8bb794141dbab88fce9586534f2cc8e58df6c" Feb 02 13:03:29 crc kubenswrapper[4703]: I0202 13:03:29.064705 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-multus_multus-8vjml_5fe22056-9a8b-4eba-8776-c50531078e2f/kube-multus/2.log" Feb 02 13:03:29 crc kubenswrapper[4703]: I0202 13:03:29.066026 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-multus/multus-8vjml" event={"ID":"5fe22056-9a8b-4eba-8776-c50531078e2f","Type":"ContainerStarted","Data":"6e740ef57deee19fde443dc84a0b5a299a817b93e5e12c72490e3a182b14ff37"} Feb 02 13:03:31 crc kubenswrapper[4703]: I0202 13:03:31.770020 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-ovn-kubernetes/ovnkube-node-qq8d8" Feb 02 13:03:35 crc kubenswrapper[4703]: I0202 13:03:35.933439 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:35 crc kubenswrapper[4703]: I0202 13:03:35.934355 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:36 crc kubenswrapper[4703]: I0202 13:03:36.186390 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c"] Feb 02 13:03:37 crc kubenswrapper[4703]: I0202 13:03:37.119001 4703 generic.go:334] "Generic (PLEG): container finished" podID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerID="ec866148676dee5d31aa13a1475bec754dd808f8486d5c31c8a9e7e5453cd2ed" exitCode=0 Feb 02 13:03:37 crc kubenswrapper[4703]: I0202 13:03:37.119080 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" event={"ID":"d06ca331-ef2a-42ae-a81d-286ae08693e5","Type":"ContainerDied","Data":"ec866148676dee5d31aa13a1475bec754dd808f8486d5c31c8a9e7e5453cd2ed"} Feb 02 13:03:37 crc kubenswrapper[4703]: I0202 13:03:37.119528 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" event={"ID":"d06ca331-ef2a-42ae-a81d-286ae08693e5","Type":"ContainerStarted","Data":"dad0f18d23a2a33f9fbdc09dd029e326c04b88bcc0f1bca203d282f732ba4a4c"} Feb 02 13:03:37 crc kubenswrapper[4703]: I0202 13:03:37.121357 4703 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 13:03:40 crc kubenswrapper[4703]: I0202 13:03:40.152872 4703 generic.go:334] "Generic (PLEG): container finished" podID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerID="def69e1fb7491bc6533dd2e8781f8c5feae6cf16d5bc2ace337c7dad5b1248ab" exitCode=0 Feb 02 13:03:40 crc kubenswrapper[4703]: I0202 13:03:40.154141 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" event={"ID":"d06ca331-ef2a-42ae-a81d-286ae08693e5","Type":"ContainerDied","Data":"def69e1fb7491bc6533dd2e8781f8c5feae6cf16d5bc2ace337c7dad5b1248ab"} Feb 02 13:03:41 crc kubenswrapper[4703]: I0202 13:03:41.161095 4703 generic.go:334] "Generic (PLEG): container finished" podID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerID="19e2424523abf9b457e6dc38efa60de63b31e8e5758aecba8b3065b2f971c7cc" exitCode=0 Feb 02 13:03:41 crc kubenswrapper[4703]: I0202 13:03:41.161242 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" event={"ID":"d06ca331-ef2a-42ae-a81d-286ae08693e5","Type":"ContainerDied","Data":"19e2424523abf9b457e6dc38efa60de63b31e8e5758aecba8b3065b2f971c7cc"} Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.414923 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.503107 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-bundle\") pod \"d06ca331-ef2a-42ae-a81d-286ae08693e5\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.503675 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-util\") pod \"d06ca331-ef2a-42ae-a81d-286ae08693e5\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.503726 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-m494s\" (UniqueName: \"kubernetes.io/projected/d06ca331-ef2a-42ae-a81d-286ae08693e5-kube-api-access-m494s\") pod \"d06ca331-ef2a-42ae-a81d-286ae08693e5\" (UID: \"d06ca331-ef2a-42ae-a81d-286ae08693e5\") " Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.504371 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-bundle" (OuterVolumeSpecName: "bundle") pod "d06ca331-ef2a-42ae-a81d-286ae08693e5" (UID: "d06ca331-ef2a-42ae-a81d-286ae08693e5"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.510923 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d06ca331-ef2a-42ae-a81d-286ae08693e5-kube-api-access-m494s" (OuterVolumeSpecName: "kube-api-access-m494s") pod "d06ca331-ef2a-42ae-a81d-286ae08693e5" (UID: "d06ca331-ef2a-42ae-a81d-286ae08693e5"). InnerVolumeSpecName "kube-api-access-m494s". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.529337 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-util" (OuterVolumeSpecName: "util") pod "d06ca331-ef2a-42ae-a81d-286ae08693e5" (UID: "d06ca331-ef2a-42ae-a81d-286ae08693e5"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.605607 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-m494s\" (UniqueName: \"kubernetes.io/projected/d06ca331-ef2a-42ae-a81d-286ae08693e5-kube-api-access-m494s\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.605672 4703 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:42 crc kubenswrapper[4703]: I0202 13:03:42.605684 4703 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d06ca331-ef2a-42ae-a81d-286ae08693e5-util\") on node \"crc\" DevicePath \"\"" Feb 02 13:03:43 crc kubenswrapper[4703]: I0202 13:03:43.180093 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" event={"ID":"d06ca331-ef2a-42ae-a81d-286ae08693e5","Type":"ContainerDied","Data":"dad0f18d23a2a33f9fbdc09dd029e326c04b88bcc0f1bca203d282f732ba4a4c"} Feb 02 13:03:43 crc kubenswrapper[4703]: I0202 13:03:43.180172 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dad0f18d23a2a33f9fbdc09dd029e326c04b88bcc0f1bca203d282f732ba4a4c" Feb 02 13:03:43 crc kubenswrapper[4703]: I0202 13:03:43.180320 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c" Feb 02 13:03:52 crc kubenswrapper[4703]: I0202 13:03:52.220490 4703 dynamic_cafile_content.go:123] "Loaded a new CA Bundle and Verifier" name="client-ca-bundle::/etc/kubernetes/kubelet-ca.crt" Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.129658 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"] Feb 02 13:03:53 crc kubenswrapper[4703]: E0202 13:03:53.130688 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="util" Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.130800 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="util" Feb 02 13:03:53 crc kubenswrapper[4703]: E0202 13:03:53.130897 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="pull" Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.130974 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="pull" Feb 02 13:03:53 crc kubenswrapper[4703]: E0202 13:03:53.131056 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="extract" Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.131119 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="extract" Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.131331 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="d06ca331-ef2a-42ae-a81d-286ae08693e5" containerName="extract" Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.132126 4703 util.go:30] "No sandbox for pod can be found. 
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.132126 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.134807 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"manager-account-dockercfg-2rlm5"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.135218 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-cert"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.135313 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"kube-root-ca.crt"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.136139 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"openshift-service-ca.crt"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.136975 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-controller-manager-service-cert"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.144968 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"]
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.272345 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d876cf0-0b55-4941-9b60-7258381875ec-apiservice-cert\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.272406 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d876cf0-0b55-4941-9b60-7258381875ec-webhook-cert\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.272456 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mvq8d\" (UniqueName: \"kubernetes.io/projected/8d876cf0-0b55-4941-9b60-7258381875ec-kube-api-access-mvq8d\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.373357 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d876cf0-0b55-4941-9b60-7258381875ec-apiservice-cert\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.373431 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d876cf0-0b55-4941-9b60-7258381875ec-webhook-cert\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.373464 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mvq8d\" (UniqueName: \"kubernetes.io/projected/8d876cf0-0b55-4941-9b60-7258381875ec-kube-api-access-mvq8d\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.387355 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/8d876cf0-0b55-4941-9b60-7258381875ec-apiservice-cert\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.398790 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/8d876cf0-0b55-4941-9b60-7258381875ec-webhook-cert\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.404764 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/metallb-operator-webhook-server-65699778c9-782nc"]
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.405830 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.409413 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-operator-webhook-server-service-cert"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.411243 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-dockercfg-d8v5p"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.414756 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.423912 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-65699778c9-782nc"]
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.443847 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mvq8d\" (UniqueName: \"kubernetes.io/projected/8d876cf0-0b55-4941-9b60-7258381875ec-kube-api-access-mvq8d\") pod \"metallb-operator-controller-manager-54d9d5b5d6-wdjdr\" (UID: \"8d876cf0-0b55-4941-9b60-7258381875ec\") " pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.450067 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.577711 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9c3d0481-1b4c-4d06-8605-bb6079b162b8-webhook-cert\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.578298 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9kzbl\" (UniqueName: \"kubernetes.io/projected/9c3d0481-1b4c-4d06-8605-bb6079b162b8-kube-api-access-9kzbl\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.578381 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9c3d0481-1b4c-4d06-8605-bb6079b162b8-apiservice-cert\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.688362 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9kzbl\" (UniqueName: \"kubernetes.io/projected/9c3d0481-1b4c-4d06-8605-bb6079b162b8-kube-api-access-9kzbl\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.688479 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9c3d0481-1b4c-4d06-8605-bb6079b162b8-apiservice-cert\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.688520 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9c3d0481-1b4c-4d06-8605-bb6079b162b8-webhook-cert\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.701592 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/9c3d0481-1b4c-4d06-8605-bb6079b162b8-webhook-cert\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.707324 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/9c3d0481-1b4c-4d06-8605-bb6079b162b8-apiservice-cert\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.727432 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9kzbl\" (UniqueName: \"kubernetes.io/projected/9c3d0481-1b4c-4d06-8605-bb6079b162b8-kube-api-access-9kzbl\") pod \"metallb-operator-webhook-server-65699778c9-782nc\" (UID: \"9c3d0481-1b4c-4d06-8605-bb6079b162b8\") " pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:53 crc kubenswrapper[4703]: I0202 13:03:53.813652 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:03:54 crc kubenswrapper[4703]: I0202 13:03:54.082913 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"]
Feb 02 13:03:54 crc kubenswrapper[4703]: I0202 13:03:54.126001 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/metallb-operator-webhook-server-65699778c9-782nc"]
Feb 02 13:03:54 crc kubenswrapper[4703]: W0202 13:03:54.139895 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod9c3d0481_1b4c_4d06_8605_bb6079b162b8.slice/crio-dad12ba7538a12749a6c78854a395f8af903e595041e7eca32a082f31ba5a44c WatchSource:0}: Error finding container dad12ba7538a12749a6c78854a395f8af903e595041e7eca32a082f31ba5a44c: Status 404 returned error can't find the container with id dad12ba7538a12749a6c78854a395f8af903e595041e7eca32a082f31ba5a44c
Feb 02 13:03:54 crc kubenswrapper[4703]: I0202 13:03:54.278145 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc" event={"ID":"9c3d0481-1b4c-4d06-8605-bb6079b162b8","Type":"ContainerStarted","Data":"dad12ba7538a12749a6c78854a395f8af903e595041e7eca32a082f31ba5a44c"}
Feb 02 13:03:54 crc kubenswrapper[4703]: I0202 13:03:54.279816 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr" event={"ID":"8d876cf0-0b55-4941-9b60-7258381875ec","Type":"ContainerStarted","Data":"fc8d9aa1af4e1f68811e1dd4c44e54c3cdb44df7663a59c979d9deed07a498a5"}
Feb 02 13:03:58 crc kubenswrapper[4703]: I0202 13:03:58.315463 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr" event={"ID":"8d876cf0-0b55-4941-9b60-7258381875ec","Type":"ContainerStarted","Data":"5c3bed9b50b421ee974c27e4774d0c4cce7aece4cb09672a491fbcc00cbf4f09"}
Feb 02 13:03:58 crc kubenswrapper[4703]: I0202 13:03:58.316504 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:03:58 crc kubenswrapper[4703]: I0202 13:03:58.352284 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr" podStartSLOduration=1.981451565 podStartE2EDuration="5.352237436s" podCreationTimestamp="2026-02-02 13:03:53 +0000 UTC" firstStartedPulling="2026-02-02 13:03:54.111905125 +0000 UTC m=+761.127112659" lastFinishedPulling="2026-02-02 13:03:57.482690996 +0000 UTC m=+764.497898530" observedRunningTime="2026-02-02 13:03:58.339393348 +0000 UTC m=+765.354600912" watchObservedRunningTime="2026-02-02 13:03:58.352237436 +0000 UTC m=+765.367444990"
Feb 02 13:04:00 crc kubenswrapper[4703]: I0202 13:04:00.332430 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc" event={"ID":"9c3d0481-1b4c-4d06-8605-bb6079b162b8","Type":"ContainerStarted","Data":"5756ae7589daa9a3af5e4bd7507071ce04620092bd37cfb92fd8ffcc52ba77d2"}
Feb 02 13:04:00 crc kubenswrapper[4703]: I0202 13:04:00.333327 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:04:00 crc kubenswrapper[4703]: I0202 13:04:00.361112 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc" podStartSLOduration=2.138551371 podStartE2EDuration="7.361087637s" podCreationTimestamp="2026-02-02 13:03:53 +0000 UTC" firstStartedPulling="2026-02-02 13:03:54.142070029 +0000 UTC m=+761.157277563" lastFinishedPulling="2026-02-02 13:03:59.364606295 +0000 UTC m=+766.379813829" observedRunningTime="2026-02-02 13:04:00.356443204 +0000 UTC m=+767.371650768" watchObservedRunningTime="2026-02-02 13:04:00.361087637 +0000 UTC m=+767.376295171"
Feb 02 13:04:13 crc kubenswrapper[4703]: I0202 13:04:13.820035 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-webhook-server-65699778c9-782nc"
Feb 02 13:04:15 crc kubenswrapper[4703]: I0202 13:04:15.985633 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 13:04:15 crc kubenswrapper[4703]: I0202 13:04:15.986400 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
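[editor's note] The patch_prober/prober.go pair above is one failed liveness probe against machine-config-daemon: an HTTP GET to 127.0.0.1:8798/health that found nothing listening (connection refused). A single failure only gets logged; the kubelet restarts the container only after failureThreshold consecutive failures. Roughly what the check amounts to (a sketch, not kubelet source; kubelet treats 2xx/3xx responses as success):

    // probe.go - approximation of the failing HTTP liveness check above.
    package main

    import (
        "fmt"
        "net/http"
        "time"
    )

    func main() {
        client := &http.Client{Timeout: time.Second}
        resp, err := client.Get("http://127.0.0.1:8798/health")
        if err != nil {
            // Matches the logged output: "dial tcp 127.0.0.1:8798: connect: connection refused"
            fmt.Println("probe failure:", err)
            return
        }
        defer resp.Body.Close()
        if resp.StatusCode >= 200 && resp.StatusCode < 400 {
            fmt.Println("probe success:", resp.Status)
        } else {
            fmt.Println("probe failure:", resp.Status)
        }
    }

The same probe fails again at 13:04:45 further down, 30 seconds later, suggesting a 30s probe period with the endpoint still down.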
Feb 02 13:04:33 crc kubenswrapper[4703]: I0202 13:04:33.454194 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/metallb-operator-controller-manager-54d9d5b5d6-wdjdr"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.190559 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-bqvnt"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.197529 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.198483 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.200504 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.224971 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-webhook-server-cert"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.225239 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-daemon-dockercfg-m78rz"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.225411 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"frr-startup"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.225745 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"frr-k8s-certs-secret"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.239315 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.286140 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ff8e6a8-2795-4cb8-9550-75b3129ef6b4-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-dfdkw\" (UID: \"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.286714 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bchxt\" (UniqueName: \"kubernetes.io/projected/1ff8e6a8-2795-4cb8-9550-75b3129ef6b4-kube-api-access-bchxt\") pod \"frr-k8s-webhook-server-7df86c4f6c-dfdkw\" (UID: \"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.367127 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/speaker-zgzbl"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.368336 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.371151 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-dockercfg-jh7xq"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.371655 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"metallb-system"/"metallb-excludel2"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.375708 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-memberlist"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.375717 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"speaker-certs-secret"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389433 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-metrics-certs\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389484 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-metrics\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389506 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389532 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bchxt\" (UniqueName: \"kubernetes.io/projected/1ff8e6a8-2795-4cb8-9550-75b3129ef6b4-kube-api-access-bchxt\") pod \"frr-k8s-webhook-server-7df86c4f6c-dfdkw\" (UID: \"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389552 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-startup\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389571 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-7ksk7\" (UniqueName: \"kubernetes.io/projected/3ead451e-1f12-417c-9bcf-59722dedbb65-kube-api-access-7ksk7\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389590 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/622b2024-9b5b-4964-aed7-52b15352cd1d-metallb-excludel2\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389609 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-reloader\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389636 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-sockets\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389663 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8zqtn\" (UniqueName: \"kubernetes.io/projected/622b2024-9b5b-4964-aed7-52b15352cd1d-kube-api-access-8zqtn\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389691 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ff8e6a8-2795-4cb8-9550-75b3129ef6b4-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-dfdkw\" (UID: \"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389712 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-conf\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.389741 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ead451e-1f12-417c-9bcf-59722dedbb65-metrics-certs\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.392983 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["metallb-system/controller-6968d8fdc4-wxlfd"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.393940 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.396897 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"controller-certs-secret"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.411350 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/1ff8e6a8-2795-4cb8-9550-75b3129ef6b4-cert\") pod \"frr-k8s-webhook-server-7df86c4f6c-dfdkw\" (UID: \"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.416307 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-wxlfd"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.434527 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bchxt\" (UniqueName: \"kubernetes.io/projected/1ff8e6a8-2795-4cb8-9550-75b3129ef6b4-kube-api-access-bchxt\") pod \"frr-k8s-webhook-server-7df86c4f6c-dfdkw\" (UID: \"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4\") " pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490795 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ead451e-1f12-417c-9bcf-59722dedbb65-metrics-certs\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490862 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-metrics-certs\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490893 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-metrics\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490913 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490945 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-startup\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490966 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-7ksk7\" (UniqueName: \"kubernetes.io/projected/3ead451e-1f12-417c-9bcf-59722dedbb65-kube-api-access-7ksk7\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.490988 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/622b2024-9b5b-4964-aed7-52b15352cd1d-metallb-excludel2\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.491010 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-reloader\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.491034 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-sockets\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.491063 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8zqtn\" (UniqueName: \"kubernetes.io/projected/622b2024-9b5b-4964-aed7-52b15352cd1d-kube-api-access-8zqtn\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.491104 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-conf\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: E0202 13:04:34.491568 4703 secret.go:188] Couldn't get secret metallb-system/speaker-certs-secret: secret "speaker-certs-secret" not found
Feb 02 13:04:34 crc kubenswrapper[4703]: E0202 13:04:34.491621 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-metrics-certs podName:622b2024-9b5b-4964-aed7-52b15352cd1d nodeName:}" failed. No retries permitted until 2026-02-02 13:04:34.991601629 +0000 UTC m=+802.006809163 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "metrics-certs" (UniqueName: "kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-metrics-certs") pod "speaker-zgzbl" (UID: "622b2024-9b5b-4964-aed7-52b15352cd1d") : secret "speaker-certs-secret" not found
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.491703 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-conf\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-conf\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.492090 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-sockets\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-sockets\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.492102 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"reloader\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-reloader\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.493252 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metallb-excludel2\" (UniqueName: \"kubernetes.io/configmap/622b2024-9b5b-4964-aed7-52b15352cd1d-metallb-excludel2\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: E0202 13:04:34.493382 4703 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 02 13:04:34 crc kubenswrapper[4703]: E0202 13:04:34.493450 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist podName:622b2024-9b5b-4964-aed7-52b15352cd1d nodeName:}" failed. No retries permitted until 2026-02-02 13:04:34.993432332 +0000 UTC m=+802.008639946 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist") pod "speaker-zgzbl" (UID: "622b2024-9b5b-4964-aed7-52b15352cd1d") : secret "metallb-memberlist" not found
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.493804 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics\" (UniqueName: \"kubernetes.io/empty-dir/3ead451e-1f12-417c-9bcf-59722dedbb65-metrics\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.494036 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"frr-startup\" (UniqueName: \"kubernetes.io/configmap/3ead451e-1f12-417c-9bcf-59722dedbb65-frr-startup\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.495447 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/3ead451e-1f12-417c-9bcf-59722dedbb65-metrics-certs\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.513541 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8zqtn\" (UniqueName: \"kubernetes.io/projected/622b2024-9b5b-4964-aed7-52b15352cd1d-kube-api-access-8zqtn\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.519590 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-7ksk7\" (UniqueName: \"kubernetes.io/projected/3ead451e-1f12-417c-9bcf-59722dedbb65-kube-api-access-7ksk7\") pod \"frr-k8s-bqvnt\" (UID: \"3ead451e-1f12-417c-9bcf-59722dedbb65\") " pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.524868 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.538547 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.594072 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zpwnn\" (UniqueName: \"kubernetes.io/projected/5fc3b818-d481-4eb8-b065-b03384d4f164-kube-api-access-zpwnn\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.594923 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5fc3b818-d481-4eb8-b065-b03384d4f164-cert\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.594978 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fc3b818-d481-4eb8-b065-b03384d4f164-metrics-certs\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.695995 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zpwnn\" (UniqueName: \"kubernetes.io/projected/5fc3b818-d481-4eb8-b065-b03384d4f164-kube-api-access-zpwnn\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.696522 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5fc3b818-d481-4eb8-b065-b03384d4f164-cert\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.696563 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fc3b818-d481-4eb8-b065-b03384d4f164-metrics-certs\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.698537 4703 reflector.go:368] Caches populated for *v1.Secret from object-"metallb-system"/"metallb-webhook-cert"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.703990 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/5fc3b818-d481-4eb8-b065-b03384d4f164-metrics-certs\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.710900 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"cert\" (UniqueName: \"kubernetes.io/secret/5fc3b818-d481-4eb8-b065-b03384d4f164-cert\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.715089 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zpwnn\" (UniqueName: \"kubernetes.io/projected/5fc3b818-d481-4eb8-b065-b03384d4f164-kube-api-access-zpwnn\") pod \"controller-6968d8fdc4-wxlfd\" (UID: \"5fc3b818-d481-4eb8-b065-b03384d4f164\") " pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.767057 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"]
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.774585 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:34 crc kubenswrapper[4703]: W0202 13:04:34.774827 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod1ff8e6a8_2795_4cb8_9550_75b3129ef6b4.slice/crio-f46cb0bc25c0757e06ca916248ae783cd843f081d3fbf8939d1bdd532d9e42af WatchSource:0}: Error finding container f46cb0bc25c0757e06ca916248ae783cd843f081d3fbf8939d1bdd532d9e42af: Status 404 returned error can't find the container with id f46cb0bc25c0757e06ca916248ae783cd843f081d3fbf8939d1bdd532d9e42af
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.976017 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["metallb-system/controller-6968d8fdc4-wxlfd"]
Feb 02 13:04:34 crc kubenswrapper[4703]: W0202 13:04:34.986000 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod5fc3b818_d481_4eb8_b065_b03384d4f164.slice/crio-cd1409cac53de5df3b1af31ec785a42d57621a146f7f925e6745b88c8084ba20 WatchSource:0}: Error finding container cd1409cac53de5df3b1af31ec785a42d57621a146f7f925e6745b88c8084ba20: Status 404 returned error can't find the container with id cd1409cac53de5df3b1af31ec785a42d57621a146f7f925e6745b88c8084ba20
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.999463 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-metrics-certs\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: I0202 13:04:34.999539 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:34 crc kubenswrapper[4703]: E0202 13:04:34.999713 4703 secret.go:188] Couldn't get secret metallb-system/metallb-memberlist: secret "metallb-memberlist" not found
Feb 02 13:04:34 crc kubenswrapper[4703]: E0202 13:04:34.999806 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist podName:622b2024-9b5b-4964-aed7-52b15352cd1d nodeName:}" failed. No retries permitted until 2026-02-02 13:04:35.999780771 +0000 UTC m=+803.014988305 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "memberlist" (UniqueName: "kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist") pod "speaker-zgzbl" (UID: "622b2024-9b5b-4964-aed7-52b15352cd1d") : secret "metallb-memberlist" not found
Feb 02 13:04:35 crc kubenswrapper[4703]: I0202 13:04:35.009664 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"metrics-certs\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-metrics-certs\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:35 crc kubenswrapper[4703]: I0202 13:04:35.544439 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw" event={"ID":"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4","Type":"ContainerStarted","Data":"f46cb0bc25c0757e06ca916248ae783cd843f081d3fbf8939d1bdd532d9e42af"}
Feb 02 13:04:35 crc kubenswrapper[4703]: I0202 13:04:35.546865 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-wxlfd" event={"ID":"5fc3b818-d481-4eb8-b065-b03384d4f164","Type":"ContainerStarted","Data":"e3bed8f36c3781c671e382488af0fb0c817686506234a24c48536d107b5dd56f"}
Feb 02 13:04:35 crc kubenswrapper[4703]: I0202 13:04:35.546893 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-wxlfd" event={"ID":"5fc3b818-d481-4eb8-b065-b03384d4f164","Type":"ContainerStarted","Data":"cd1409cac53de5df3b1af31ec785a42d57621a146f7f925e6745b88c8084ba20"}
Feb 02 13:04:35 crc kubenswrapper[4703]: I0202 13:04:35.547762 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"58432fa24c06fa247014b1c400f5c765cf61f813260e277a3d1b4f17d8af135f"}
Feb 02 13:04:36 crc kubenswrapper[4703]: I0202 13:04:36.013152 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:36 crc kubenswrapper[4703]: I0202 13:04:36.019479 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"memberlist\" (UniqueName: \"kubernetes.io/secret/622b2024-9b5b-4964-aed7-52b15352cd1d-memberlist\") pod \"speaker-zgzbl\" (UID: \"622b2024-9b5b-4964-aed7-52b15352cd1d\") " pod="metallb-system/speaker-zgzbl"
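[editor's note] The speaker-zgzbl mount failures above resolve exactly as the nestedpendingoperations entries predict: each failed SetUp schedules a retry with a doubling delay (durationBeforeRetry 500ms at 13:04:34.491, then 1s at 13:04:34.999), and once the metallb-memberlist secret exists, the 13:04:36 retry succeeds. A toy model of that doubling (the 2m ceiling is an assumption, not taken from the log):

    // backoff.go - illustrative model of the durationBeforeRetry doubling.
    package main

    import (
        "fmt"
        "time"
    )

    func main() {
        const (
            initial  = 500 * time.Millisecond // first retry delay seen above
            factor   = 2
            maxDelay = 2 * time.Minute // assumed cap
        )
        d := initial
        for attempt := 1; attempt <= 6; attempt++ {
            fmt.Printf("attempt %d: retry in %s\n", attempt, d)
            d *= factor
            if d > maxDelay {
                d = maxDelay
            }
        }
    }

The practical takeaway from the pattern is that a "secret ... not found" mount error is not fatal: the pod stays in ContainerCreating and the kubelet keeps retrying until the operator creates the secret, as happens here.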
Feb 02 13:04:36 crc kubenswrapper[4703]: I0202 13:04:36.186553 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:36 crc kubenswrapper[4703]: I0202 13:04:36.558664 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zgzbl" event={"ID":"622b2024-9b5b-4964-aed7-52b15352cd1d","Type":"ContainerStarted","Data":"62714fa56bd42c0266240a3f7a6dc04758950c46ab513699172af754adac80fd"}
Feb 02 13:04:37 crc kubenswrapper[4703]: I0202 13:04:37.577002 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zgzbl" event={"ID":"622b2024-9b5b-4964-aed7-52b15352cd1d","Type":"ContainerStarted","Data":"a455f923c5c50a71d577792de09a724475e99fc78e37e79bd3474a232ac9ecd4"}
Feb 02 13:04:40 crc kubenswrapper[4703]: I0202 13:04:40.612988 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/controller-6968d8fdc4-wxlfd" event={"ID":"5fc3b818-d481-4eb8-b065-b03384d4f164","Type":"ContainerStarted","Data":"8207cb005b3d9f15dd8220f48429c490d4aa96cbb32231ac0f066c211f9465ce"}
Feb 02 13:04:40 crc kubenswrapper[4703]: I0202 13:04:40.613764 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:40 crc kubenswrapper[4703]: I0202 13:04:40.618254 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/speaker-zgzbl" event={"ID":"622b2024-9b5b-4964-aed7-52b15352cd1d","Type":"ContainerStarted","Data":"1fae0fa11b446c64040996bf6f335d5affa285776770bf4a7648fe1125aee56b"}
Feb 02 13:04:40 crc kubenswrapper[4703]: I0202 13:04:40.618726 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:40 crc kubenswrapper[4703]: I0202 13:04:40.638251 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/controller-6968d8fdc4-wxlfd" podStartSLOduration=2.113651656 podStartE2EDuration="6.638228294s" podCreationTimestamp="2026-02-02 13:04:34 +0000 UTC" firstStartedPulling="2026-02-02 13:04:35.194338462 +0000 UTC m=+802.209545996" lastFinishedPulling="2026-02-02 13:04:39.7189151 +0000 UTC m=+806.734122634" observedRunningTime="2026-02-02 13:04:40.627917299 +0000 UTC m=+807.643124833" watchObservedRunningTime="2026-02-02 13:04:40.638228294 +0000 UTC m=+807.653435828"
Feb 02 13:04:40 crc kubenswrapper[4703]: I0202 13:04:40.654872 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/speaker-zgzbl" podStartSLOduration=3.731868852 podStartE2EDuration="6.654797129s" podCreationTimestamp="2026-02-02 13:04:34 +0000 UTC" firstStartedPulling="2026-02-02 13:04:36.79691178 +0000 UTC m=+803.812119314" lastFinishedPulling="2026-02-02 13:04:39.719840057 +0000 UTC m=+806.735047591" observedRunningTime="2026-02-02 13:04:40.649076305 +0000 UTC m=+807.664283839" watchObservedRunningTime="2026-02-02 13:04:40.654797129 +0000 UTC m=+807.670004663"
Feb 02 13:04:43 crc kubenswrapper[4703]: I0202 13:04:43.638380 4703 generic.go:334] "Generic (PLEG): container finished" podID="3ead451e-1f12-417c-9bcf-59722dedbb65" containerID="7363a553328120e9b33078755c56a6864739e769247ff77067203e563812bb98" exitCode=0
Feb 02 13:04:43 crc kubenswrapper[4703]: I0202 13:04:43.638492 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerDied","Data":"7363a553328120e9b33078755c56a6864739e769247ff77067203e563812bb98"}
Feb 02 13:04:43 crc kubenswrapper[4703]: I0202 13:04:43.640974 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw" event={"ID":"1ff8e6a8-2795-4cb8-9550-75b3129ef6b4","Type":"ContainerStarted","Data":"7d2955028d6e8def3e8b058e2c2006dcc2fdf73716d68c4c9b1b80f8756c414c"}
Feb 02 13:04:43 crc kubenswrapper[4703]: I0202 13:04:43.641132 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:43 crc kubenswrapper[4703]: I0202 13:04:43.685378 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw" podStartSLOduration=1.704029685 podStartE2EDuration="9.685355027s" podCreationTimestamp="2026-02-02 13:04:34 +0000 UTC" firstStartedPulling="2026-02-02 13:04:34.790263011 +0000 UTC m=+801.805470545" lastFinishedPulling="2026-02-02 13:04:42.771588353 +0000 UTC m=+809.786795887" observedRunningTime="2026-02-02 13:04:43.678933553 +0000 UTC m=+810.694141107" watchObservedRunningTime="2026-02-02 13:04:43.685355027 +0000 UTC m=+810.700562561"
Feb 02 13:04:44 crc kubenswrapper[4703]: I0202 13:04:44.658538 4703 generic.go:334] "Generic (PLEG): container finished" podID="3ead451e-1f12-417c-9bcf-59722dedbb65" containerID="612482111c4496f61832c576c30fe896c5658181b647dc3342a60eac1d9d4d22" exitCode=0
Feb 02 13:04:44 crc kubenswrapper[4703]: I0202 13:04:44.659355 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerDied","Data":"612482111c4496f61832c576c30fe896c5658181b647dc3342a60eac1d9d4d22"}
Feb 02 13:04:45 crc kubenswrapper[4703]: I0202 13:04:45.670885 4703 generic.go:334] "Generic (PLEG): container finished" podID="3ead451e-1f12-417c-9bcf-59722dedbb65" containerID="30bc91e4c3816a7d65b01f88005612484e21228bb610241021e657875acc3e6d" exitCode=0
Feb 02 13:04:45 crc kubenswrapper[4703]: I0202 13:04:45.670995 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerDied","Data":"30bc91e4c3816a7d65b01f88005612484e21228bb610241021e657875acc3e6d"}
Feb 02 13:04:45 crc kubenswrapper[4703]: I0202 13:04:45.985449 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 13:04:45 crc kubenswrapper[4703]: I0202 13:04:45.985535 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 13:04:46 crc kubenswrapper[4703]: I0202 13:04:46.194124 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/speaker-zgzbl"
Feb 02 13:04:46 crc kubenswrapper[4703]: I0202 13:04:46.718894 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"e7f997a170cd990d843c4dfef254761253c556215cdcffb892ffd9a6ecf3ef77"}
Feb 02 13:04:46 crc kubenswrapper[4703]: I0202 13:04:46.718949 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"d9aca0fc75e8a0b455373b5e31b8bd9ceb0ea0eacdd35fd2effaf47f546bfb61"}
Feb 02 13:04:46 crc kubenswrapper[4703]: I0202 13:04:46.718961 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"0ece3983d5a308116c04992a0bfbb9963cd8efde132a56af43cd9044696a0ed8"}
Feb 02 13:04:46 crc kubenswrapper[4703]: I0202 13:04:46.718970 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"907b29646b26dafb02cac55ed9c1cba92fa462a0430a331ba3c764181da1ca87"}
Feb 02 13:04:46 crc kubenswrapper[4703]: I0202 13:04:46.718981 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"8a3c463e9891acaf355eae741b14bcc1f477a6cf24409d359128aaa0f8b9d967"}
Feb 02 13:04:47 crc kubenswrapper[4703]: I0202 13:04:47.732121 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="metallb-system/frr-k8s-bqvnt" event={"ID":"3ead451e-1f12-417c-9bcf-59722dedbb65","Type":"ContainerStarted","Data":"7eb94149f52a578c134b3eb52332362ca478838ba3e1cbd4f83245ab7729ce68"}
Feb 02 13:04:47 crc kubenswrapper[4703]: I0202 13:04:47.733199 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:49 crc kubenswrapper[4703]: I0202 13:04:49.526180 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:49 crc kubenswrapper[4703]: I0202 13:04:49.567581 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:04:49 crc kubenswrapper[4703]: I0202 13:04:49.590500 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="metallb-system/frr-k8s-bqvnt" podStartSLOduration=7.530017651 podStartE2EDuration="15.590477248s" podCreationTimestamp="2026-02-02 13:04:34 +0000 UTC" firstStartedPulling="2026-02-02 13:04:34.691094342 +0000 UTC m=+801.706301876" lastFinishedPulling="2026-02-02 13:04:42.751553929 +0000 UTC m=+809.766761473" observedRunningTime="2026-02-02 13:04:47.756966146 +0000 UTC m=+814.772173680" watchObservedRunningTime="2026-02-02 13:04:49.590477248 +0000 UTC m=+816.605684782"
Feb 02 13:04:52 crc kubenswrapper[4703]: I0202 13:04:52.893205 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-index-6p6h6"]
Feb 02 13:04:52 crc kubenswrapper[4703]: I0202 13:04:52.894185 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:04:52 crc kubenswrapper[4703]: I0202 13:04:52.897414 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"kube-root-ca.crt"
Feb 02 13:04:52 crc kubenswrapper[4703]: I0202 13:04:52.897807 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-index-dockercfg-x8j7q"
Feb 02 13:04:52 crc kubenswrapper[4703]: I0202 13:04:52.900765 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openstack-operators"/"openshift-service-ca.crt"
Feb 02 13:04:52 crc kubenswrapper[4703]: I0202 13:04:52.909572 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-6p6h6"]
Feb 02 13:04:53 crc kubenswrapper[4703]: I0202 13:04:53.001833 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-99kq2\" (UniqueName: \"kubernetes.io/projected/ad2463d0-0110-4de5-8371-206e16e285a4-kube-api-access-99kq2\") pod \"mariadb-operator-index-6p6h6\" (UID: \"ad2463d0-0110-4de5-8371-206e16e285a4\") " pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:04:53 crc kubenswrapper[4703]: I0202 13:04:53.103595 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-99kq2\" (UniqueName: \"kubernetes.io/projected/ad2463d0-0110-4de5-8371-206e16e285a4-kube-api-access-99kq2\") pod \"mariadb-operator-index-6p6h6\" (UID: \"ad2463d0-0110-4de5-8371-206e16e285a4\") " pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:04:53 crc kubenswrapper[4703]: I0202 13:04:53.128365 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-99kq2\" (UniqueName: \"kubernetes.io/projected/ad2463d0-0110-4de5-8371-206e16e285a4-kube-api-access-99kq2\") pod \"mariadb-operator-index-6p6h6\" (UID: \"ad2463d0-0110-4de5-8371-206e16e285a4\") " pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:04:53 crc kubenswrapper[4703]: I0202 13:04:53.214239 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:04:53 crc kubenswrapper[4703]: I0202 13:04:53.430913 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-index-6p6h6"]
Feb 02 13:04:53 crc kubenswrapper[4703]: W0202 13:04:53.438486 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podad2463d0_0110_4de5_8371_206e16e285a4.slice/crio-23faca83b42d81364765217fdf7c03b68db272da10b47dea987e2a4c8b12993d WatchSource:0}: Error finding container 23faca83b42d81364765217fdf7c03b68db272da10b47dea987e2a4c8b12993d: Status 404 returned error can't find the container with id 23faca83b42d81364765217fdf7c03b68db272da10b47dea987e2a4c8b12993d
Feb 02 13:04:53 crc kubenswrapper[4703]: I0202 13:04:53.777870 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6p6h6" event={"ID":"ad2463d0-0110-4de5-8371-206e16e285a4","Type":"ContainerStarted","Data":"23faca83b42d81364765217fdf7c03b68db272da10b47dea987e2a4c8b12993d"}
Feb 02 13:04:54 crc kubenswrapper[4703]: I0202 13:04:54.544170 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-webhook-server-7df86c4f6c-dfdkw"
Feb 02 13:04:54 crc kubenswrapper[4703]: I0202 13:04:54.779398 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/controller-6968d8fdc4-wxlfd"
Feb 02 13:04:58 crc kubenswrapper[4703]: I0202 13:04:58.849676 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6p6h6" event={"ID":"ad2463d0-0110-4de5-8371-206e16e285a4","Type":"ContainerStarted","Data":"bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8"}
Feb 02 13:04:58 crc kubenswrapper[4703]: I0202 13:04:58.871964 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-index-6p6h6" podStartSLOduration=2.113457513 podStartE2EDuration="6.871932348s" podCreationTimestamp="2026-02-02 13:04:52 +0000 UTC" firstStartedPulling="2026-02-02 13:04:53.441200132 +0000 UTC m=+820.456407666" lastFinishedPulling="2026-02-02 13:04:58.199674957 +0000 UTC m=+825.214882501" observedRunningTime="2026-02-02 13:04:58.867664275 +0000 UTC m=+825.882871809" watchObservedRunningTime="2026-02-02 13:04:58.871932348 +0000 UTC m=+825.887139892"
Feb 02 13:05:03 crc kubenswrapper[4703]: I0202 13:05:03.215711 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:05:03 crc kubenswrapper[4703]: I0202 13:05:03.216230 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:05:03 crc kubenswrapper[4703]: I0202 13:05:03.249901 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:05:03 crc kubenswrapper[4703]: I0202 13:05:03.915167 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/mariadb-operator-index-6p6h6"
Feb 02 13:05:04 crc kubenswrapper[4703]: I0202 13:05:04.530766 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="metallb-system/frr-k8s-bqvnt"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.725028 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"]
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.726632 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.731070 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t9m2k"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.731639 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-bundle\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.731722 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-util\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.731762 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fw4fn\" (UniqueName: \"kubernetes.io/projected/d12aa798-a139-47d8-b857-1061cb464090-kube-api-access-fw4fn\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.744623 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"]
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.832802 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-bundle\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.833469 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-util\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.833549 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fw4fn\" (UniqueName: \"kubernetes.io/projected/d12aa798-a139-47d8-b857-1061cb464090-kube-api-access-fw4fn\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.833601 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-bundle\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.834013 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-util\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:08 crc kubenswrapper[4703]: I0202 13:05:08.859316 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fw4fn\" (UniqueName: \"kubernetes.io/projected/d12aa798-a139-47d8-b857-1061cb464090-kube-api-access-fw4fn\") pod \"f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:09 crc kubenswrapper[4703]: I0202 13:05:09.051560 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"
Feb 02 13:05:09 crc kubenswrapper[4703]: I0202 13:05:09.520053 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"]
Feb 02 13:05:09 crc kubenswrapper[4703]: I0202 13:05:09.939466 4703 generic.go:334] "Generic (PLEG): container finished" podID="d12aa798-a139-47d8-b857-1061cb464090" containerID="6f07c489b72e0860db3df4679565d0a777680586ad68b79b9d66aebf483023e3" exitCode=0
Feb 02 13:05:09 crc kubenswrapper[4703]: I0202 13:05:09.940699 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" event={"ID":"d12aa798-a139-47d8-b857-1061cb464090","Type":"ContainerDied","Data":"6f07c489b72e0860db3df4679565d0a777680586ad68b79b9d66aebf483023e3"}
Feb 02 13:05:09 crc kubenswrapper[4703]: I0202 13:05:09.940745 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" event={"ID":"d12aa798-a139-47d8-b857-1061cb464090","Type":"ContainerStarted","Data":"715248fedf8fa9a8c1eb5215564bbb5d4b8225b5b37d7c0a1bed0a034636e602"}
Feb 02 13:05:10 crc kubenswrapper[4703]: I0202 13:05:10.948402 4703 generic.go:334] "Generic (PLEG): container finished" podID="d12aa798-a139-47d8-b857-1061cb464090" containerID="2e3b426cec04bb0a659fa3447e60c6f19d3e78cb10b979be86c3697a2c9dbdf5" exitCode=0
Feb 02 13:05:10 crc kubenswrapper[4703]: I0202 13:05:10.948589 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" event={"ID":"d12aa798-a139-47d8-b857-1061cb464090","Type":"ContainerDied","Data":"2e3b426cec04bb0a659fa3447e60c6f19d3e78cb10b979be86c3697a2c9dbdf5"}
Feb 02 13:05:11 crc kubenswrapper[4703]: I0202 13:05:11.960220 4703 generic.go:334] "Generic (PLEG): container finished" podID="d12aa798-a139-47d8-b857-1061cb464090"
containerID="e0d6f8eab6a1246f8a6b80e3d1827552b9981ac99ca9e492c4778f60782ce0ab" exitCode=0 Feb 02 13:05:11 crc kubenswrapper[4703]: I0202 13:05:11.960299 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" event={"ID":"d12aa798-a139-47d8-b857-1061cb464090","Type":"ContainerDied","Data":"e0d6f8eab6a1246f8a6b80e3d1827552b9981ac99ca9e492c4778f60782ce0ab"} Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.205289 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.303170 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-bundle\") pod \"d12aa798-a139-47d8-b857-1061cb464090\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.303238 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fw4fn\" (UniqueName: \"kubernetes.io/projected/d12aa798-a139-47d8-b857-1061cb464090-kube-api-access-fw4fn\") pod \"d12aa798-a139-47d8-b857-1061cb464090\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.303314 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-util\") pod \"d12aa798-a139-47d8-b857-1061cb464090\" (UID: \"d12aa798-a139-47d8-b857-1061cb464090\") " Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.304429 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-bundle" (OuterVolumeSpecName: "bundle") pod "d12aa798-a139-47d8-b857-1061cb464090" (UID: "d12aa798-a139-47d8-b857-1061cb464090"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.311478 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/d12aa798-a139-47d8-b857-1061cb464090-kube-api-access-fw4fn" (OuterVolumeSpecName: "kube-api-access-fw4fn") pod "d12aa798-a139-47d8-b857-1061cb464090" (UID: "d12aa798-a139-47d8-b857-1061cb464090"). InnerVolumeSpecName "kube-api-access-fw4fn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.318460 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-util" (OuterVolumeSpecName: "util") pod "d12aa798-a139-47d8-b857-1061cb464090" (UID: "d12aa798-a139-47d8-b857-1061cb464090"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.405402 4703 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.405445 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fw4fn\" (UniqueName: \"kubernetes.io/projected/d12aa798-a139-47d8-b857-1061cb464090-kube-api-access-fw4fn\") on node \"crc\" DevicePath \"\"" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.405460 4703 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/d12aa798-a139-47d8-b857-1061cb464090-util\") on node \"crc\" DevicePath \"\"" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.976109 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" event={"ID":"d12aa798-a139-47d8-b857-1061cb464090","Type":"ContainerDied","Data":"715248fedf8fa9a8c1eb5215564bbb5d4b8225b5b37d7c0a1bed0a034636e602"} Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.976566 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="715248fedf8fa9a8c1eb5215564bbb5d4b8225b5b37d7c0a1bed0a034636e602" Feb 02 13:05:13 crc kubenswrapper[4703]: I0202 13:05:13.976240 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5" Feb 02 13:05:15 crc kubenswrapper[4703]: I0202 13:05:15.984693 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:05:15 crc kubenswrapper[4703]: I0202 13:05:15.985419 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:05:15 crc kubenswrapper[4703]: I0202 13:05:15.985533 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 13:05:15 crc kubenswrapper[4703]: I0202 13:05:15.986958 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"7989a84887b494a315910cd56de11e22a0e359c557f3cdff1b1ca6e34517f165"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 13:05:15 crc kubenswrapper[4703]: I0202 13:05:15.987057 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://7989a84887b494a315910cd56de11e22a0e359c557f3cdff1b1ca6e34517f165" gracePeriod=600 Feb 02 13:05:17 crc kubenswrapper[4703]: I0202 13:05:17.001977 4703 generic.go:334] "Generic (PLEG): container finished" 
podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="7989a84887b494a315910cd56de11e22a0e359c557f3cdff1b1ca6e34517f165" exitCode=0 Feb 02 13:05:17 crc kubenswrapper[4703]: I0202 13:05:17.002090 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"7989a84887b494a315910cd56de11e22a0e359c557f3cdff1b1ca6e34517f165"} Feb 02 13:05:17 crc kubenswrapper[4703]: I0202 13:05:17.002540 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"f45a976ef2978692f05e47b584c70f23b0b16afa947c9943af0093366493b355"} Feb 02 13:05:17 crc kubenswrapper[4703]: I0202 13:05:17.002567 4703 scope.go:117] "RemoveContainer" containerID="6feb20f2143784f14dcb3a19aeabc7b847878d2ce98f7eb53b072d47f5874e87" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.919296 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"] Feb 02 13:05:21 crc kubenswrapper[4703]: E0202 13:05:21.920386 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="pull" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.920413 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="pull" Feb 02 13:05:21 crc kubenswrapper[4703]: E0202 13:05:21.920432 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="extract" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.920442 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="extract" Feb 02 13:05:21 crc kubenswrapper[4703]: E0202 13:05:21.920463 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="util" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.920471 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="util" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.920643 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="d12aa798-a139-47d8-b857-1061cb464090" containerName="extract" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.921362 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.923815 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"webhook-server-cert" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.923975 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-dockercfg-b29g9" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.924083 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"mariadb-operator-controller-manager-service-cert" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.942127 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"] Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.944663 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-hjtf8\" (UniqueName: \"kubernetes.io/projected/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-kube-api-access-hjtf8\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.944763 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-apiservice-cert\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:21 crc kubenswrapper[4703]: I0202 13:05:21.944792 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-webhook-cert\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.045570 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-apiservice-cert\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.045619 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-webhook-cert\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.045659 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-hjtf8\" (UniqueName: \"kubernetes.io/projected/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-kube-api-access-hjtf8\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " 
pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.051164 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-apiservice-cert\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.051234 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-webhook-cert\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.067109 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-hjtf8\" (UniqueName: \"kubernetes.io/projected/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-kube-api-access-hjtf8\") pod \"mariadb-operator-controller-manager-fb6479fb9-74xhb\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.241871 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:22 crc kubenswrapper[4703]: I0202 13:05:22.503012 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"] Feb 02 13:05:23 crc kubenswrapper[4703]: I0202 13:05:23.052720 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" event={"ID":"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa","Type":"ContainerStarted","Data":"5ee66274935d1b19becd49a716de690446ed13b412c332849c7ba9c7ada8ac6f"} Feb 02 13:05:26 crc kubenswrapper[4703]: I0202 13:05:26.072928 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" event={"ID":"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa","Type":"ContainerStarted","Data":"7bc9aad6c07bba85aaf138f1a45e4c3cdf066a3673955ba9ad54df25d07fc5e5"} Feb 02 13:05:26 crc kubenswrapper[4703]: I0202 13:05:26.073514 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:26 crc kubenswrapper[4703]: I0202 13:05:26.096077 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" podStartSLOduration=1.721762338 podStartE2EDuration="5.096050448s" podCreationTimestamp="2026-02-02 13:05:21 +0000 UTC" firstStartedPulling="2026-02-02 13:05:22.512846357 +0000 UTC m=+849.528053891" lastFinishedPulling="2026-02-02 13:05:25.887134467 +0000 UTC m=+852.902342001" observedRunningTime="2026-02-02 13:05:26.090490238 +0000 UTC m=+853.105697792" watchObservedRunningTime="2026-02-02 13:05:26.096050448 +0000 UTC m=+853.111257982" Feb 02 13:05:32 crc kubenswrapper[4703]: I0202 13:05:32.247612 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" 
pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.613642 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-index-8d6lm"] Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.614590 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.616816 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-index-dockercfg-xdlsx" Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.630726 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-8d6lm"] Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.660136 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-xtq5d\" (UniqueName: \"kubernetes.io/projected/a5197757-55f2-4c6b-96a4-f3c838e0ea9f-kube-api-access-xtq5d\") pod \"infra-operator-index-8d6lm\" (UID: \"a5197757-55f2-4c6b-96a4-f3c838e0ea9f\") " pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.761073 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-xtq5d\" (UniqueName: \"kubernetes.io/projected/a5197757-55f2-4c6b-96a4-f3c838e0ea9f-kube-api-access-xtq5d\") pod \"infra-operator-index-8d6lm\" (UID: \"a5197757-55f2-4c6b-96a4-f3c838e0ea9f\") " pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.781292 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-xtq5d\" (UniqueName: \"kubernetes.io/projected/a5197757-55f2-4c6b-96a4-f3c838e0ea9f-kube-api-access-xtq5d\") pod \"infra-operator-index-8d6lm\" (UID: \"a5197757-55f2-4c6b-96a4-f3c838e0ea9f\") " pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:34 crc kubenswrapper[4703]: I0202 13:05:34.978536 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:35 crc kubenswrapper[4703]: I0202 13:05:35.359985 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-index-8d6lm"] Feb 02 13:05:35 crc kubenswrapper[4703]: W0202 13:05:35.361582 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-poda5197757_55f2_4c6b_96a4_f3c838e0ea9f.slice/crio-467a46d466cb73c3726306dab1cc8c5835637ff2ac9bfea0cb2b26a110418005 WatchSource:0}: Error finding container 467a46d466cb73c3726306dab1cc8c5835637ff2ac9bfea0cb2b26a110418005: Status 404 returned error can't find the container with id 467a46d466cb73c3726306dab1cc8c5835637ff2ac9bfea0cb2b26a110418005 Feb 02 13:05:36 crc kubenswrapper[4703]: I0202 13:05:36.181748 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-8d6lm" event={"ID":"a5197757-55f2-4c6b-96a4-f3c838e0ea9f","Type":"ContainerStarted","Data":"467a46d466cb73c3726306dab1cc8c5835637ff2ac9bfea0cb2b26a110418005"} Feb 02 13:05:37 crc kubenswrapper[4703]: I0202 13:05:37.189992 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-8d6lm" event={"ID":"a5197757-55f2-4c6b-96a4-f3c838e0ea9f","Type":"ContainerStarted","Data":"7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2"} Feb 02 13:05:44 crc kubenswrapper[4703]: I0202 13:05:44.979307 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:44 crc kubenswrapper[4703]: I0202 13:05:44.979936 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:45 crc kubenswrapper[4703]: I0202 13:05:45.012258 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:45 crc kubenswrapper[4703]: I0202 13:05:45.039651 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-index-8d6lm" podStartSLOduration=10.216737454 podStartE2EDuration="11.039620874s" podCreationTimestamp="2026-02-02 13:05:34 +0000 UTC" firstStartedPulling="2026-02-02 13:05:35.364379156 +0000 UTC m=+862.379586690" lastFinishedPulling="2026-02-02 13:05:36.187262576 +0000 UTC m=+863.202470110" observedRunningTime="2026-02-02 13:05:37.20805021 +0000 UTC m=+864.223257744" watchObservedRunningTime="2026-02-02 13:05:45.039620874 +0000 UTC m=+872.054828408" Feb 02 13:05:45 crc kubenswrapper[4703]: I0202 13:05:45.269619 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.253911 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp"] Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.255222 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.256928 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t9m2k" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.269047 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp"] Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.319311 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/2a49a90c-5c4a-4b7f-b478-6434706fa241-kube-api-access-54x44\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.319422 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-bundle\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.319456 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-util\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.420059 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-bundle\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.420117 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-util\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.420185 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/2a49a90c-5c4a-4b7f-b478-6434706fa241-kube-api-access-54x44\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.420658 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-util\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.420879 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-bundle\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.451176 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/2a49a90c-5c4a-4b7f-b478-6434706fa241-kube-api-access-54x44\") pod \"d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:46 crc kubenswrapper[4703]: I0202 13:05:46.572179 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:47 crc kubenswrapper[4703]: I0202 13:05:47.011019 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp"] Feb 02 13:05:47 crc kubenswrapper[4703]: I0202 13:05:47.254851 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" event={"ID":"2a49a90c-5c4a-4b7f-b478-6434706fa241","Type":"ContainerStarted","Data":"23076747b5aa03926b67b5ca04faa7ac461f0a81964e9f049ea029a5febf7b60"} Feb 02 13:05:47 crc kubenswrapper[4703]: I0202 13:05:47.254892 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" event={"ID":"2a49a90c-5c4a-4b7f-b478-6434706fa241","Type":"ContainerStarted","Data":"0624ea06c7dde86f295a6b39ef6910ab9017be85f543e8252b3ffe554360ca4b"} Feb 02 13:05:48 crc kubenswrapper[4703]: I0202 13:05:48.262733 4703 generic.go:334] "Generic (PLEG): container finished" podID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerID="23076747b5aa03926b67b5ca04faa7ac461f0a81964e9f049ea029a5febf7b60" exitCode=0 Feb 02 13:05:48 crc kubenswrapper[4703]: I0202 13:05:48.262787 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" event={"ID":"2a49a90c-5c4a-4b7f-b478-6434706fa241","Type":"ContainerDied","Data":"23076747b5aa03926b67b5ca04faa7ac461f0a81964e9f049ea029a5febf7b60"} Feb 02 13:05:50 crc kubenswrapper[4703]: I0202 13:05:50.283062 4703 generic.go:334] "Generic (PLEG): container finished" podID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerID="c0daaada96fa8c5e12d5b0d75b8fa3015d9cdbc7a659a1b5a7ef7237217ff9df" exitCode=0 Feb 02 13:05:50 crc kubenswrapper[4703]: I0202 13:05:50.283177 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" 
event={"ID":"2a49a90c-5c4a-4b7f-b478-6434706fa241","Type":"ContainerDied","Data":"c0daaada96fa8c5e12d5b0d75b8fa3015d9cdbc7a659a1b5a7ef7237217ff9df"} Feb 02 13:05:51 crc kubenswrapper[4703]: I0202 13:05:51.297192 4703 generic.go:334] "Generic (PLEG): container finished" podID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerID="ae8533f45ccc1a0d8810e173491a5ef92cb921cc5cb9b4d32d969268d7337415" exitCode=0 Feb 02 13:05:51 crc kubenswrapper[4703]: I0202 13:05:51.297238 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" event={"ID":"2a49a90c-5c4a-4b7f-b478-6434706fa241","Type":"ContainerDied","Data":"ae8533f45ccc1a0d8810e173491a5ef92cb921cc5cb9b4d32d969268d7337415"} Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.537991 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.705943 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/2a49a90c-5c4a-4b7f-b478-6434706fa241-kube-api-access-54x44\") pod \"2a49a90c-5c4a-4b7f-b478-6434706fa241\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.706446 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-util\") pod \"2a49a90c-5c4a-4b7f-b478-6434706fa241\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.706610 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-bundle\") pod \"2a49a90c-5c4a-4b7f-b478-6434706fa241\" (UID: \"2a49a90c-5c4a-4b7f-b478-6434706fa241\") " Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.708558 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-bundle" (OuterVolumeSpecName: "bundle") pod "2a49a90c-5c4a-4b7f-b478-6434706fa241" (UID: "2a49a90c-5c4a-4b7f-b478-6434706fa241"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.712853 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2a49a90c-5c4a-4b7f-b478-6434706fa241-kube-api-access-54x44" (OuterVolumeSpecName: "kube-api-access-54x44") pod "2a49a90c-5c4a-4b7f-b478-6434706fa241" (UID: "2a49a90c-5c4a-4b7f-b478-6434706fa241"). InnerVolumeSpecName "kube-api-access-54x44". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.807816 4703 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.807851 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-54x44\" (UniqueName: \"kubernetes.io/projected/2a49a90c-5c4a-4b7f-b478-6434706fa241-kube-api-access-54x44\") on node \"crc\" DevicePath \"\"" Feb 02 13:05:52 crc kubenswrapper[4703]: I0202 13:05:52.990834 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-util" (OuterVolumeSpecName: "util") pod "2a49a90c-5c4a-4b7f-b478-6434706fa241" (UID: "2a49a90c-5c4a-4b7f-b478-6434706fa241"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:05:53 crc kubenswrapper[4703]: I0202 13:05:53.010181 4703 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/2a49a90c-5c4a-4b7f-b478-6434706fa241-util\") on node \"crc\" DevicePath \"\"" Feb 02 13:05:53 crc kubenswrapper[4703]: I0202 13:05:53.310884 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" Feb 02 13:05:53 crc kubenswrapper[4703]: I0202 13:05:53.310882 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp" event={"ID":"2a49a90c-5c4a-4b7f-b478-6434706fa241","Type":"ContainerDied","Data":"0624ea06c7dde86f295a6b39ef6910ab9017be85f543e8252b3ffe554360ca4b"} Feb 02 13:05:53 crc kubenswrapper[4703]: I0202 13:05:53.310943 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="0624ea06c7dde86f295a6b39ef6910ab9017be85f543e8252b3ffe554360ca4b" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.747516 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm"] Feb 02 13:06:03 crc kubenswrapper[4703]: E0202 13:06:03.749520 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerName="pull" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.749628 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerName="pull" Feb 02 13:06:03 crc kubenswrapper[4703]: E0202 13:06:03.749726 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerName="extract" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.749805 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerName="extract" Feb 02 13:06:03 crc kubenswrapper[4703]: E0202 13:06:03.749871 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerName="util" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.749937 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" containerName="util" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.750121 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" 
containerName="extract" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.750708 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.753298 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-service-cert" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.754288 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"infra-operator-controller-manager-dockercfg-bwbv7" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.755126 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-webhook-cert\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.755169 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5kkjp\" (UniqueName: \"kubernetes.io/projected/1a37f2c9-188b-4c10-ac43-035262781444-kube-api-access-5kkjp\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.755412 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-apiservice-cert\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.773956 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm"] Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.856200 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-apiservice-cert\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.856543 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-webhook-cert\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.856686 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5kkjp\" (UniqueName: \"kubernetes.io/projected/1a37f2c9-188b-4c10-ac43-035262781444-kube-api-access-5kkjp\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 
13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.870225 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-apiservice-cert\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.870668 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-webhook-cert\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:03 crc kubenswrapper[4703]: I0202 13:06:03.878229 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5kkjp\" (UniqueName: \"kubernetes.io/projected/1a37f2c9-188b-4c10-ac43-035262781444-kube-api-access-5kkjp\") pod \"infra-operator-controller-manager-8ff54f68-8fjxm\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:04 crc kubenswrapper[4703]: I0202 13:06:04.070356 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:04 crc kubenswrapper[4703]: I0202 13:06:04.530969 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm"] Feb 02 13:06:05 crc kubenswrapper[4703]: I0202 13:06:05.385595 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" event={"ID":"1a37f2c9-188b-4c10-ac43-035262781444","Type":"ContainerStarted","Data":"463ab6c74c08eed08129bcb3a7fc8d0ae649a879c68310137e474aa0d349c552"} Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.750531 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.751851 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.753788 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"openstack-config-data" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.753931 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"kube-root-ca.crt" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.755417 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"openstack-scripts" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.755834 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"openshift-service-ca.crt" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.755920 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"galera-openstack-dockercfg-kxjvv" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.767206 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.771878 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.772887 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.809557 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.810682 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.814232 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.826018 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889645 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-operator-scripts\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889710 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-operator-scripts\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889735 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-generated\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889765 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: 
\"kubernetes.io/empty-dir/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889786 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-p2nzd\" (UniqueName: \"kubernetes.io/projected/e5582b3d-ce11-478d-b841-587e8d50dcd9-kube-api-access-p2nzd\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889803 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-kolla-config\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889828 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vgd84\" (UniqueName: \"kubernetes.io/projected/fcf04156-6efa-4399-832a-aabe98bde6e3-kube-api-access-vgd84\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889846 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-default\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889861 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-default\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889884 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-generated\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889916 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-default\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889932 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6h57d\" (UniqueName: \"kubernetes.io/projected/e923beff-a23d-4f99-a05f-f48d59515e7e-kube-api-access-6h57d\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889948 4703 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-kolla-config\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889969 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.889985 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.890000 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.890018 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-kolla-config\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.890032 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991757 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-default\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991809 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-default\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991830 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-generated\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991868 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume 
\"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-default\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991883 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-kolla-config\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991912 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6h57d\" (UniqueName: \"kubernetes.io/projected/e923beff-a23d-4f99-a05f-f48d59515e7e-kube-api-access-6h57d\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991932 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991949 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991973 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.991992 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-kolla-config\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992005 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992025 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-operator-scripts\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992057 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-operator-scripts\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " 
pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992073 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-generated\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992107 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992126 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-kolla-config\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992142 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-p2nzd\" (UniqueName: \"kubernetes.io/projected/e5582b3d-ce11-478d-b841-587e8d50dcd9-kube-api-access-p2nzd\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.992163 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vgd84\" (UniqueName: \"kubernetes.io/projected/fcf04156-6efa-4399-832a-aabe98bde6e3-kube-api-access-vgd84\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.993695 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-generated\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.993854 4703 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") device mount path \"/mnt/openstack/pv01\"" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.993945 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-default\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.994033 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-kolla-config\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: 
I0202 13:06:07.994585 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-generated\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.994989 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-default\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.995416 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-operator-scripts\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.995546 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-kolla-config\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.995872 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-operator-scripts\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.996109 4703 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") device mount path \"/mnt/openstack/pv11\"" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.996196 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-generated\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.996332 4703 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") device mount path \"/mnt/openstack/pv02\"" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.997039 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-operator-scripts\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:07 crc kubenswrapper[4703]: I0202 13:06:07.997208 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-default\" (UniqueName: 
\"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-default\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.008855 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-kolla-config\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.015612 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.017250 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-p2nzd\" (UniqueName: \"kubernetes.io/projected/e5582b3d-ce11-478d-b841-587e8d50dcd9-kube-api-access-p2nzd\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.017505 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6h57d\" (UniqueName: \"kubernetes.io/projected/e923beff-a23d-4f99-a05f-f48d59515e7e-kube-api-access-6h57d\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.018032 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vgd84\" (UniqueName: \"kubernetes.io/projected/fcf04156-6efa-4399-832a-aabe98bde6e3-kube-api-access-vgd84\") pod \"openstack-galera-0\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.019680 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"openstack-galera-2\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.020156 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"openstack-galera-1\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.090179 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.117569 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.137054 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.410514 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" event={"ID":"1a37f2c9-188b-4c10-ac43-035262781444","Type":"ContainerStarted","Data":"051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b"} Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.410817 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.431104 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" podStartSLOduration=2.410008288 podStartE2EDuration="5.431056412s" podCreationTimestamp="2026-02-02 13:06:03 +0000 UTC" firstStartedPulling="2026-02-02 13:06:04.540675341 +0000 UTC m=+891.555882875" lastFinishedPulling="2026-02-02 13:06:07.561723465 +0000 UTC m=+894.576930999" observedRunningTime="2026-02-02 13:06:08.42855029 +0000 UTC m=+895.443757844" watchObservedRunningTime="2026-02-02 13:06:08.431056412 +0000 UTC m=+895.446263946" Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.548084 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.598604 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Feb 02 13:06:08 crc kubenswrapper[4703]: W0202 13:06:08.610729 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode5582b3d_ce11_478d_b841_587e8d50dcd9.slice/crio-80a0bf2ed11ac8b1b7ecf06257a4225dab3b4136277c403de5e9eb87001317ef WatchSource:0}: Error finding container 80a0bf2ed11ac8b1b7ecf06257a4225dab3b4136277c403de5e9eb87001317ef: Status 404 returned error can't find the container with id 80a0bf2ed11ac8b1b7ecf06257a4225dab3b4136277c403de5e9eb87001317ef Feb 02 13:06:08 crc kubenswrapper[4703]: I0202 13:06:08.662653 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Feb 02 13:06:08 crc kubenswrapper[4703]: W0202 13:06:08.672661 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pode923beff_a23d_4f99_a05f_f48d59515e7e.slice/crio-9ff581dd16a21a18c7456e0429f48de220898260ff4ff5192c59b7760535ee69 WatchSource:0}: Error finding container 9ff581dd16a21a18c7456e0429f48de220898260ff4ff5192c59b7760535ee69: Status 404 returned error can't find the container with id 9ff581dd16a21a18c7456e0429f48de220898260ff4ff5192c59b7760535ee69 Feb 02 13:06:09 crc kubenswrapper[4703]: I0202 13:06:09.421217 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"fcf04156-6efa-4399-832a-aabe98bde6e3","Type":"ContainerStarted","Data":"773d64b87ef2d67e6422701aba6a0343d8108bc70d19a4fa7b6f2fbbeff93d5d"} Feb 02 13:06:09 crc kubenswrapper[4703]: I0202 13:06:09.424948 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"e5582b3d-ce11-478d-b841-587e8d50dcd9","Type":"ContainerStarted","Data":"80a0bf2ed11ac8b1b7ecf06257a4225dab3b4136277c403de5e9eb87001317ef"} Feb 02 13:06:09 crc kubenswrapper[4703]: I0202 13:06:09.436678 4703 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"e923beff-a23d-4f99-a05f-f48d59515e7e","Type":"ContainerStarted","Data":"9ff581dd16a21a18c7456e0429f48de220898260ff4ff5192c59b7760535ee69"} Feb 02 13:06:14 crc kubenswrapper[4703]: I0202 13:06:14.075205 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.480106 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-7rjtc"] Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.481376 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.483706 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-index-dockercfg-zn5z7" Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.510807 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-7rjtc"] Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.518578 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"e923beff-a23d-4f99-a05f-f48d59515e7e","Type":"ContainerStarted","Data":"951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb"} Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.582789 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-k5cdg\" (UniqueName: \"kubernetes.io/projected/9f553de1-6c71-4fea-a39a-74fde703b099-kube-api-access-k5cdg\") pod \"rabbitmq-cluster-operator-index-7rjtc\" (UID: \"9f553de1-6c71-4fea-a39a-74fde703b099\") " pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.684372 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-k5cdg\" (UniqueName: \"kubernetes.io/projected/9f553de1-6c71-4fea-a39a-74fde703b099-kube-api-access-k5cdg\") pod \"rabbitmq-cluster-operator-index-7rjtc\" (UID: \"9f553de1-6c71-4fea-a39a-74fde703b099\") " pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.703998 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-k5cdg\" (UniqueName: \"kubernetes.io/projected/9f553de1-6c71-4fea-a39a-74fde703b099-kube-api-access-k5cdg\") pod \"rabbitmq-cluster-operator-index-7rjtc\" (UID: \"9f553de1-6c71-4fea-a39a-74fde703b099\") " pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:19 crc kubenswrapper[4703]: I0202 13:06:19.797202 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:22 crc kubenswrapper[4703]: I0202 13:06:22.327938 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-7rjtc"] Feb 02 13:06:22 crc kubenswrapper[4703]: I0202 13:06:22.538483 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" event={"ID":"9f553de1-6c71-4fea-a39a-74fde703b099","Type":"ContainerStarted","Data":"5ab8aa9b16ebf253753cc2aa9c78c0d514adbb825aefbd3b73e4895f9180fabf"} Feb 02 13:06:22 crc kubenswrapper[4703]: I0202 13:06:22.539597 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"fcf04156-6efa-4399-832a-aabe98bde6e3","Type":"ContainerStarted","Data":"77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5"} Feb 02 13:06:23 crc kubenswrapper[4703]: I0202 13:06:23.677712 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-7rjtc"] Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.285929 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-f7km8"] Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.287510 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.299032 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-f7km8"] Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.454716 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nwgdq\" (UniqueName: \"kubernetes.io/projected/01565f9f-8074-451e-9ebd-1b94124e364d-kube-api-access-nwgdq\") pod \"rabbitmq-cluster-operator-index-f7km8\" (UID: \"01565f9f-8074-451e-9ebd-1b94124e364d\") " pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.552968 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"e5582b3d-ce11-478d-b841-587e8d50dcd9","Type":"ContainerStarted","Data":"68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e"} Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.556159 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nwgdq\" (UniqueName: \"kubernetes.io/projected/01565f9f-8074-451e-9ebd-1b94124e364d-kube-api-access-nwgdq\") pod \"rabbitmq-cluster-operator-index-f7km8\" (UID: \"01565f9f-8074-451e-9ebd-1b94124e364d\") " pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.600349 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nwgdq\" (UniqueName: \"kubernetes.io/projected/01565f9f-8074-451e-9ebd-1b94124e364d-kube-api-access-nwgdq\") pod \"rabbitmq-cluster-operator-index-f7km8\" (UID: \"01565f9f-8074-451e-9ebd-1b94124e364d\") " pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:24 crc kubenswrapper[4703]: I0202 13:06:24.603872 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:25 crc kubenswrapper[4703]: I0202 13:06:25.106836 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-f7km8"] Feb 02 13:06:25 crc kubenswrapper[4703]: I0202 13:06:25.558950 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" event={"ID":"01565f9f-8074-451e-9ebd-1b94124e364d","Type":"ContainerStarted","Data":"8533ecb6e1796d7efbfd15b012b447617b3ac84ff47423ad9be32c40879aa379"} Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.822788 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/memcached-0"] Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.825307 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.827989 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"memcached-memcached-dockercfg-vxjz9" Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.829408 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"memcached-config-data" Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.830330 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/memcached-0"] Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.938628 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlk96\" (UniqueName: \"kubernetes.io/projected/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kube-api-access-rlk96\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.938954 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kolla-config\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:28 crc kubenswrapper[4703]: I0202 13:06:28.939125 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-config-data\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.041928 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlk96\" (UniqueName: \"kubernetes.io/projected/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kube-api-access-rlk96\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.042470 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kolla-config\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.042768 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: 
\"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-config-data\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.043719 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kolla-config\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.044041 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-config-data\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.063588 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlk96\" (UniqueName: \"kubernetes.io/projected/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kube-api-access-rlk96\") pod \"memcached-0\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.146579 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.596575 4703 generic.go:334] "Generic (PLEG): container finished" podID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerID="68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e" exitCode=0 Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.596662 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"e5582b3d-ce11-478d-b841-587e8d50dcd9","Type":"ContainerDied","Data":"68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e"} Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.602980 4703 generic.go:334] "Generic (PLEG): container finished" podID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerID="951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb" exitCode=0 Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.603070 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"e923beff-a23d-4f99-a05f-f48d59515e7e","Type":"ContainerDied","Data":"951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb"} Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.609633 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" event={"ID":"9f553de1-6c71-4fea-a39a-74fde703b099","Type":"ContainerStarted","Data":"edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9"} Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.609812 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" podUID="9f553de1-6c71-4fea-a39a-74fde703b099" containerName="registry-server" containerID="cri-o://edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9" gracePeriod=2 Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.612097 4703 generic.go:334] "Generic (PLEG): container finished" podID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerID="77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5" exitCode=0 Feb 02 13:06:29 
crc kubenswrapper[4703]: I0202 13:06:29.612163 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"fcf04156-6efa-4399-832a-aabe98bde6e3","Type":"ContainerDied","Data":"77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5"} Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.627961 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" event={"ID":"01565f9f-8074-451e-9ebd-1b94124e364d","Type":"ContainerStarted","Data":"e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478"} Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.648341 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/memcached-0"] Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.692986 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" podStartSLOduration=1.5238419460000001 podStartE2EDuration="5.692960468s" podCreationTimestamp="2026-02-02 13:06:24 +0000 UTC" firstStartedPulling="2026-02-02 13:06:25.128384437 +0000 UTC m=+912.143591971" lastFinishedPulling="2026-02-02 13:06:29.297502969 +0000 UTC m=+916.312710493" observedRunningTime="2026-02-02 13:06:29.676978769 +0000 UTC m=+916.692186303" watchObservedRunningTime="2026-02-02 13:06:29.692960468 +0000 UTC m=+916.708168002" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.709613 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" podStartSLOduration=3.751043836 podStartE2EDuration="10.709594937s" podCreationTimestamp="2026-02-02 13:06:19 +0000 UTC" firstStartedPulling="2026-02-02 13:06:22.349070309 +0000 UTC m=+909.364277833" lastFinishedPulling="2026-02-02 13:06:29.3076214 +0000 UTC m=+916.322828934" observedRunningTime="2026-02-02 13:06:29.704898202 +0000 UTC m=+916.720105746" watchObservedRunningTime="2026-02-02 13:06:29.709594937 +0000 UTC m=+916.724802471" Feb 02 13:06:29 crc kubenswrapper[4703]: I0202 13:06:29.803072 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.022685 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.166209 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-k5cdg\" (UniqueName: \"kubernetes.io/projected/9f553de1-6c71-4fea-a39a-74fde703b099-kube-api-access-k5cdg\") pod \"9f553de1-6c71-4fea-a39a-74fde703b099\" (UID: \"9f553de1-6c71-4fea-a39a-74fde703b099\") " Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.170887 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/9f553de1-6c71-4fea-a39a-74fde703b099-kube-api-access-k5cdg" (OuterVolumeSpecName: "kube-api-access-k5cdg") pod "9f553de1-6c71-4fea-a39a-74fde703b099" (UID: "9f553de1-6c71-4fea-a39a-74fde703b099"). InnerVolumeSpecName "kube-api-access-k5cdg". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.268384 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-k5cdg\" (UniqueName: \"kubernetes.io/projected/9f553de1-6c71-4fea-a39a-74fde703b099-kube-api-access-k5cdg\") on node \"crc\" DevicePath \"\"" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.635075 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e","Type":"ContainerStarted","Data":"cf7ef50fdb91b61e86e04120438f7122bf7bbf5f7c9e2b488cf89f0d0f7c1879"} Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.636910 4703 generic.go:334] "Generic (PLEG): container finished" podID="9f553de1-6c71-4fea-a39a-74fde703b099" containerID="edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9" exitCode=0 Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.636973 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" event={"ID":"9f553de1-6c71-4fea-a39a-74fde703b099","Type":"ContainerDied","Data":"edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9"} Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.637032 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" event={"ID":"9f553de1-6c71-4fea-a39a-74fde703b099","Type":"ContainerDied","Data":"5ab8aa9b16ebf253753cc2aa9c78c0d514adbb825aefbd3b73e4895f9180fabf"} Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.637062 4703 scope.go:117] "RemoveContainer" containerID="edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.637161 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-7rjtc" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.652682 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"fcf04156-6efa-4399-832a-aabe98bde6e3","Type":"ContainerStarted","Data":"b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9"} Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.657419 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"e5582b3d-ce11-478d-b841-587e8d50dcd9","Type":"ContainerStarted","Data":"01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed"} Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.670499 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"e923beff-a23d-4f99-a05f-f48d59515e7e","Type":"ContainerStarted","Data":"46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b"} Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.670595 4703 scope.go:117] "RemoveContainer" containerID="edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9" Feb 02 13:06:30 crc kubenswrapper[4703]: E0202 13:06:30.670861 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9\": container with ID starting with edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9 not found: ID does not exist" containerID="edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.670900 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9"} err="failed to get container status \"edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9\": rpc error: code = NotFound desc = could not find container \"edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9\": container with ID starting with edea8d7c10845ff8bc10215fe1fa339406b1637c81a712ebed0323e214250fb9 not found: ID does not exist" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.675399 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-7rjtc"] Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.682406 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-7rjtc"] Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.698175 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/openstack-galera-1" podStartSLOduration=9.373080147 podStartE2EDuration="24.698100472s" podCreationTimestamp="2026-02-02 13:06:06 +0000 UTC" firstStartedPulling="2026-02-02 13:06:08.613225674 +0000 UTC m=+895.628433208" lastFinishedPulling="2026-02-02 13:06:23.938245999 +0000 UTC m=+910.953453533" observedRunningTime="2026-02-02 13:06:30.686994902 +0000 UTC m=+917.702202436" watchObservedRunningTime="2026-02-02 13:06:30.698100472 +0000 UTC m=+917.713308026" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.706544 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/openstack-galera-0" podStartSLOduration=11.368228419 podStartE2EDuration="24.706528364s" podCreationTimestamp="2026-02-02 13:06:06 +0000 
UTC" firstStartedPulling="2026-02-02 13:06:08.55818402 +0000 UTC m=+895.573391554" lastFinishedPulling="2026-02-02 13:06:21.896483965 +0000 UTC m=+908.911691499" observedRunningTime="2026-02-02 13:06:30.704711492 +0000 UTC m=+917.719919036" watchObservedRunningTime="2026-02-02 13:06:30.706528364 +0000 UTC m=+917.721735898" Feb 02 13:06:30 crc kubenswrapper[4703]: I0202 13:06:30.729416 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/openstack-galera-2" podStartSLOduration=15.258223758 podStartE2EDuration="24.729395902s" podCreationTimestamp="2026-02-02 13:06:06 +0000 UTC" firstStartedPulling="2026-02-02 13:06:08.676123444 +0000 UTC m=+895.691330978" lastFinishedPulling="2026-02-02 13:06:18.147295588 +0000 UTC m=+905.162503122" observedRunningTime="2026-02-02 13:06:30.722462433 +0000 UTC m=+917.737669967" watchObservedRunningTime="2026-02-02 13:06:30.729395902 +0000 UTC m=+917.744603436" Feb 02 13:06:31 crc kubenswrapper[4703]: I0202 13:06:31.940703 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="9f553de1-6c71-4fea-a39a-74fde703b099" path="/var/lib/kubelet/pods/9f553de1-6c71-4fea-a39a-74fde703b099/volumes" Feb 02 13:06:32 crc kubenswrapper[4703]: I0202 13:06:32.683452 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e","Type":"ContainerStarted","Data":"1374f08e88226a477450c328cf22a3740fd88e1d8283aeb0727ff981782b3580"} Feb 02 13:06:32 crc kubenswrapper[4703]: I0202 13:06:32.683574 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:32 crc kubenswrapper[4703]: I0202 13:06:32.697794 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/memcached-0" podStartSLOduration=2.89361138 podStartE2EDuration="4.697770536s" podCreationTimestamp="2026-02-02 13:06:28 +0000 UTC" firstStartedPulling="2026-02-02 13:06:29.745963484 +0000 UTC m=+916.761171018" lastFinishedPulling="2026-02-02 13:06:31.55012264 +0000 UTC m=+918.565330174" observedRunningTime="2026-02-02 13:06:32.697462177 +0000 UTC m=+919.712669711" watchObservedRunningTime="2026-02-02 13:06:32.697770536 +0000 UTC m=+919.712978090" Feb 02 13:06:34 crc kubenswrapper[4703]: I0202 13:06:34.604165 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:34 crc kubenswrapper[4703]: I0202 13:06:34.604548 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:34 crc kubenswrapper[4703]: I0202 13:06:34.632310 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:34 crc kubenswrapper[4703]: I0202 13:06:34.719100 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.516745 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf"] Feb 02 13:06:36 crc kubenswrapper[4703]: E0202 13:06:36.517220 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="9f553de1-6c71-4fea-a39a-74fde703b099" containerName="registry-server" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 
13:06:36.517233 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="9f553de1-6c71-4fea-a39a-74fde703b099" containerName="registry-server" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.517343 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="9f553de1-6c71-4fea-a39a-74fde703b099" containerName="registry-server" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.518133 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.521965 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t9m2k" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.532032 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf"] Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.651036 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.651147 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.651306 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-jns6l\" (UniqueName: \"kubernetes.io/projected/277808c1-860f-4b6e-99a2-ad8d6031d334-kube-api-access-jns6l\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.752994 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-jns6l\" (UniqueName: \"kubernetes.io/projected/277808c1-860f-4b6e-99a2-ad8d6031d334-kube-api-access-jns6l\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.753087 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.753182 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.753866 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-util\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.754033 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-bundle\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.773550 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-jns6l\" (UniqueName: \"kubernetes.io/projected/277808c1-860f-4b6e-99a2-ad8d6031d334-kube-api-access-jns6l\") pod \"9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:36 crc kubenswrapper[4703]: I0202 13:06:36.835929 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:37 crc kubenswrapper[4703]: I0202 13:06:37.246718 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf"] Feb 02 13:06:37 crc kubenswrapper[4703]: I0202 13:06:37.714007 4703 generic.go:334] "Generic (PLEG): container finished" podID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerID="718471428c2a599fb59b394bd589db03c6f66227fda1c7971ddabf0389e19002" exitCode=0 Feb 02 13:06:37 crc kubenswrapper[4703]: I0202 13:06:37.714048 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" event={"ID":"277808c1-860f-4b6e-99a2-ad8d6031d334","Type":"ContainerDied","Data":"718471428c2a599fb59b394bd589db03c6f66227fda1c7971ddabf0389e19002"} Feb 02 13:06:37 crc kubenswrapper[4703]: I0202 13:06:37.714071 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" event={"ID":"277808c1-860f-4b6e-99a2-ad8d6031d334","Type":"ContainerStarted","Data":"e676f14d998f3fdfcec7affe70cf600a9c91c4d15b797bd2fdb4ae4644761eb5"} Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.092501 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.092541 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.118132 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" 
pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.119014 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.137920 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.137973 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.232880 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.722522 4703 generic.go:334] "Generic (PLEG): container finished" podID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerID="c43246162d62b06d57f0df3d50eb625a27fa35ba826d3caa850bfa319b3dee0e" exitCode=0 Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.722651 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" event={"ID":"277808c1-860f-4b6e-99a2-ad8d6031d334","Type":"ContainerDied","Data":"c43246162d62b06d57f0df3d50eb625a27fa35ba826d3caa850bfa319b3dee0e"} Feb 02 13:06:38 crc kubenswrapper[4703]: I0202 13:06:38.818487 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:06:39 crc kubenswrapper[4703]: I0202 13:06:39.148042 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/memcached-0" Feb 02 13:06:39 crc kubenswrapper[4703]: I0202 13:06:39.732733 4703 generic.go:334] "Generic (PLEG): container finished" podID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerID="9114e1c12592b255afe4208fc64751a92b2f3e6c92ac430712712795fcac4d4d" exitCode=0 Feb 02 13:06:39 crc kubenswrapper[4703]: I0202 13:06:39.732771 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" event={"ID":"277808c1-860f-4b6e-99a2-ad8d6031d334","Type":"ContainerDied","Data":"9114e1c12592b255afe4208fc64751a92b2f3e6c92ac430712712795fcac4d4d"} Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.093880 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.266744 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-bundle\") pod \"277808c1-860f-4b6e-99a2-ad8d6031d334\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.267130 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-jns6l\" (UniqueName: \"kubernetes.io/projected/277808c1-860f-4b6e-99a2-ad8d6031d334-kube-api-access-jns6l\") pod \"277808c1-860f-4b6e-99a2-ad8d6031d334\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.267336 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-util\") pod \"277808c1-860f-4b6e-99a2-ad8d6031d334\" (UID: \"277808c1-860f-4b6e-99a2-ad8d6031d334\") " Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.267435 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-bundle" (OuterVolumeSpecName: "bundle") pod "277808c1-860f-4b6e-99a2-ad8d6031d334" (UID: "277808c1-860f-4b6e-99a2-ad8d6031d334"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.268005 4703 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.272971 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/277808c1-860f-4b6e-99a2-ad8d6031d334-kube-api-access-jns6l" (OuterVolumeSpecName: "kube-api-access-jns6l") pod "277808c1-860f-4b6e-99a2-ad8d6031d334" (UID: "277808c1-860f-4b6e-99a2-ad8d6031d334"). InnerVolumeSpecName "kube-api-access-jns6l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.282332 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-util" (OuterVolumeSpecName: "util") pod "277808c1-860f-4b6e-99a2-ad8d6031d334" (UID: "277808c1-860f-4b6e-99a2-ad8d6031d334"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.369833 4703 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/277808c1-860f-4b6e-99a2-ad8d6031d334-util\") on node \"crc\" DevicePath \"\"" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.369874 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-jns6l\" (UniqueName: \"kubernetes.io/projected/277808c1-860f-4b6e-99a2-ad8d6031d334-kube-api-access-jns6l\") on node \"crc\" DevicePath \"\"" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.748235 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" event={"ID":"277808c1-860f-4b6e-99a2-ad8d6031d334","Type":"ContainerDied","Data":"e676f14d998f3fdfcec7affe70cf600a9c91c4d15b797bd2fdb4ae4644761eb5"} Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.748288 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="e676f14d998f3fdfcec7affe70cf600a9c91c4d15b797bd2fdb4ae4644761eb5" Feb 02 13:06:41 crc kubenswrapper[4703]: I0202 13:06:41.748340 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.815845 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/root-account-create-update-l9fxz"] Feb 02 13:06:46 crc kubenswrapper[4703]: E0202 13:06:46.816657 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="util" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.816673 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="util" Feb 02 13:06:46 crc kubenswrapper[4703]: E0202 13:06:46.816683 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="pull" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.816688 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="pull" Feb 02 13:06:46 crc kubenswrapper[4703]: E0202 13:06:46.816696 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="extract" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.816702 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="extract" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.816809 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" containerName="extract" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.817190 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.819330 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"openstack-mariadb-root-db-secret" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.822409 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/root-account-create-update-l9fxz"] Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.945571 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-operator-scripts\") pod \"root-account-create-update-l9fxz\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:46 crc kubenswrapper[4703]: I0202 13:06:46.945994 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-58brl\" (UniqueName: \"kubernetes.io/projected/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-kube-api-access-58brl\") pod \"root-account-create-update-l9fxz\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:47 crc kubenswrapper[4703]: I0202 13:06:47.048263 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-58brl\" (UniqueName: \"kubernetes.io/projected/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-kube-api-access-58brl\") pod \"root-account-create-update-l9fxz\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:47 crc kubenswrapper[4703]: I0202 13:06:47.049244 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-operator-scripts\") pod \"root-account-create-update-l9fxz\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:47 crc kubenswrapper[4703]: I0202 13:06:47.050054 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-operator-scripts\") pod \"root-account-create-update-l9fxz\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:47 crc kubenswrapper[4703]: I0202 13:06:47.082009 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-58brl\" (UniqueName: \"kubernetes.io/projected/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-kube-api-access-58brl\") pod \"root-account-create-update-l9fxz\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:47 crc kubenswrapper[4703]: I0202 13:06:47.144883 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:48 crc kubenswrapper[4703]: I0202 13:06:48.217131 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="manila-kuttl-tests/openstack-galera-2" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="galera" probeResult="failure" output=< Feb 02 13:06:48 crc kubenswrapper[4703]: wsrep_local_state_comment (Donor/Desynced) differs from Synced Feb 02 13:06:48 crc kubenswrapper[4703]: > Feb 02 13:06:49 crc kubenswrapper[4703]: I0202 13:06:49.108161 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/root-account-create-update-l9fxz"] Feb 02 13:06:49 crc kubenswrapper[4703]: W0202 13:06:49.118182 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod5f15ab71_7ffd_4ff7_83db_9a1bd624cb88.slice/crio-7ab6f2f887bc15c75bdbc54f27fc0d836a93696d7be4509d8b0d94d46f2ddf5a WatchSource:0}: Error finding container 7ab6f2f887bc15c75bdbc54f27fc0d836a93696d7be4509d8b0d94d46f2ddf5a: Status 404 returned error can't find the container with id 7ab6f2f887bc15c75bdbc54f27fc0d836a93696d7be4509d8b0d94d46f2ddf5a Feb 02 13:06:49 crc kubenswrapper[4703]: I0202 13:06:49.810583 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/root-account-create-update-l9fxz" event={"ID":"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88","Type":"ContainerStarted","Data":"4a5f20bbeae70aac00cfed9900111ded3e3ff5a9b7e7c4a6244c74d285d769e4"} Feb 02 13:06:49 crc kubenswrapper[4703]: I0202 13:06:49.810678 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/root-account-create-update-l9fxz" event={"ID":"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88","Type":"ContainerStarted","Data":"7ab6f2f887bc15c75bdbc54f27fc0d836a93696d7be4509d8b0d94d46f2ddf5a"} Feb 02 13:06:49 crc kubenswrapper[4703]: I0202 13:06:49.846721 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/root-account-create-update-l9fxz" podStartSLOduration=3.846697557 podStartE2EDuration="3.846697557s" podCreationTimestamp="2026-02-02 13:06:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:06:49.843827965 +0000 UTC m=+936.859035499" watchObservedRunningTime="2026-02-02 13:06:49.846697557 +0000 UTC m=+936.861905131" Feb 02 13:06:51 crc kubenswrapper[4703]: I0202 13:06:51.825178 4703 generic.go:334] "Generic (PLEG): container finished" podID="5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" containerID="4a5f20bbeae70aac00cfed9900111ded3e3ff5a9b7e7c4a6244c74d285d769e4" exitCode=0 Feb 02 13:06:51 crc kubenswrapper[4703]: I0202 13:06:51.825328 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/root-account-create-update-l9fxz" event={"ID":"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88","Type":"ContainerDied","Data":"4a5f20bbeae70aac00cfed9900111ded3e3ff5a9b7e7c4a6244c74d285d769e4"} Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.076025 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-8kstc"] Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.077149 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.098571 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kstc"] Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.218028 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6hlkt\" (UniqueName: \"kubernetes.io/projected/5b0812c1-d2be-4907-9559-35df7f0425fb-kube-api-access-6hlkt\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.218186 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-utilities\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.218262 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-catalog-content\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.319150 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6hlkt\" (UniqueName: \"kubernetes.io/projected/5b0812c1-d2be-4907-9559-35df7f0425fb-kube-api-access-6hlkt\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.319219 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-utilities\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.319245 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-catalog-content\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.319777 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-catalog-content\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.319864 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-utilities\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.352555 4703 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-6hlkt\" (UniqueName: \"kubernetes.io/projected/5b0812c1-d2be-4907-9559-35df7f0425fb-kube-api-access-6hlkt\") pod \"redhat-marketplace-8kstc\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") " pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.402018 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.810202 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kstc"] Feb 02 13:06:52 crc kubenswrapper[4703]: I0202 13:06:52.838736 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerStarted","Data":"c6eeee34fd0a5177b4f3c3006abd7b86667a1a9954d74486a7e8c99254879f30"} Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.165152 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.236511 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-58brl\" (UniqueName: \"kubernetes.io/projected/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-kube-api-access-58brl\") pod \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.236592 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-operator-scripts\") pod \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\" (UID: \"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88\") " Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.237794 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" (UID: "5f15ab71-7ffd-4ff7-83db-9a1bd624cb88"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.244304 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-kube-api-access-58brl" (OuterVolumeSpecName: "kube-api-access-58brl") pod "5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" (UID: "5f15ab71-7ffd-4ff7-83db-9a1bd624cb88"). InnerVolumeSpecName "kube-api-access-58brl". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.337924 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-58brl\" (UniqueName: \"kubernetes.io/projected/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-kube-api-access-58brl\") on node \"crc\" DevicePath \"\"" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.337973 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.846399 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/root-account-create-update-l9fxz" event={"ID":"5f15ab71-7ffd-4ff7-83db-9a1bd624cb88","Type":"ContainerDied","Data":"7ab6f2f887bc15c75bdbc54f27fc0d836a93696d7be4509d8b0d94d46f2ddf5a"} Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.846441 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7ab6f2f887bc15c75bdbc54f27fc0d836a93696d7be4509d8b0d94d46f2ddf5a" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.846452 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/root-account-create-update-l9fxz" Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.848428 4703 generic.go:334] "Generic (PLEG): container finished" podID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerID="dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63" exitCode=0 Feb 02 13:06:53 crc kubenswrapper[4703]: I0202 13:06:53.848518 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerDied","Data":"dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63"} Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.623697 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.713649 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.856765 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerStarted","Data":"9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63"} Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.887419 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-h94dn"] Feb 02 13:06:54 crc kubenswrapper[4703]: E0202 13:06:54.887730 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" containerName="mariadb-account-create-update" Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.887747 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" containerName="mariadb-account-create-update" Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.887891 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" containerName="mariadb-account-create-update" Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.889084 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:54 crc kubenswrapper[4703]: I0202 13:06:54.903021 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h94dn"] Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.065175 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h7j26\" (UniqueName: \"kubernetes.io/projected/10075e21-6f03-425d-afe1-18896de4c242-kube-api-access-h7j26\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.065242 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-utilities\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.065308 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-catalog-content\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.166889 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-catalog-content\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.167380 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h7j26\" (UniqueName: \"kubernetes.io/projected/10075e21-6f03-425d-afe1-18896de4c242-kube-api-access-h7j26\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.167491 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-utilities\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.167606 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-catalog-content\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.167982 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-utilities\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.188189 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume 
\"kube-api-access-h7j26\" (UniqueName: \"kubernetes.io/projected/10075e21-6f03-425d-afe1-18896de4c242-kube-api-access-h7j26\") pod \"redhat-operators-h94dn\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.214413 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.458466 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-h94dn"] Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.864064 4703 generic.go:334] "Generic (PLEG): container finished" podID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerID="9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63" exitCode=0 Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.864142 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerDied","Data":"9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63"} Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.866476 4703 generic.go:334] "Generic (PLEG): container finished" podID="10075e21-6f03-425d-afe1-18896de4c242" containerID="8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82" exitCode=0 Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.866509 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerDied","Data":"8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82"} Feb 02 13:06:55 crc kubenswrapper[4703]: I0202 13:06:55.866531 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerStarted","Data":"b996ae85afd8db39b24aef2490cc449062a6e16fb07ef884217cb12c2f624a39"} Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.697163 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r"] Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.713924 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r"] Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.714054 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.717878 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"rabbitmq-cluster-operator-dockercfg-mbz5j" Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.795977 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-fbknm\" (UniqueName: \"kubernetes.io/projected/f3e89261-c5b4-429d-b84e-4a89fa66b98b-kube-api-access-fbknm\") pod \"rabbitmq-cluster-operator-779fc9694b-hlt8r\" (UID: \"f3e89261-c5b4-429d-b84e-4a89fa66b98b\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.883531 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerStarted","Data":"b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf"} Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.886262 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerStarted","Data":"a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f"} Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.897384 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-fbknm\" (UniqueName: \"kubernetes.io/projected/f3e89261-c5b4-429d-b84e-4a89fa66b98b-kube-api-access-fbknm\") pod \"rabbitmq-cluster-operator-779fc9694b-hlt8r\" (UID: \"f3e89261-c5b4-429d-b84e-4a89fa66b98b\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.914434 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-8kstc" podStartSLOduration=2.496590764 podStartE2EDuration="4.91441603s" podCreationTimestamp="2026-02-02 13:06:52 +0000 UTC" firstStartedPulling="2026-02-02 13:06:53.850502432 +0000 UTC m=+940.865709966" lastFinishedPulling="2026-02-02 13:06:56.268327698 +0000 UTC m=+943.283535232" observedRunningTime="2026-02-02 13:06:56.908550241 +0000 UTC m=+943.923757775" watchObservedRunningTime="2026-02-02 13:06:56.91441603 +0000 UTC m=+943.929623554" Feb 02 13:06:56 crc kubenswrapper[4703]: I0202 13:06:56.935127 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-fbknm\" (UniqueName: \"kubernetes.io/projected/f3e89261-c5b4-429d-b84e-4a89fa66b98b-kube-api-access-fbknm\") pod \"rabbitmq-cluster-operator-779fc9694b-hlt8r\" (UID: \"f3e89261-c5b4-429d-b84e-4a89fa66b98b\") " pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:06:57 crc kubenswrapper[4703]: I0202 13:06:57.088627 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:06:57 crc kubenswrapper[4703]: I0202 13:06:57.484948 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r"] Feb 02 13:06:57 crc kubenswrapper[4703]: I0202 13:06:57.895397 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" event={"ID":"f3e89261-c5b4-429d-b84e-4a89fa66b98b","Type":"ContainerStarted","Data":"7d552f058d366cbbe5f08c4e8a59624f0b839ebee56580079e1338aefa9872a7"} Feb 02 13:06:57 crc kubenswrapper[4703]: I0202 13:06:57.900165 4703 generic.go:334] "Generic (PLEG): container finished" podID="10075e21-6f03-425d-afe1-18896de4c242" containerID="a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f" exitCode=0 Feb 02 13:06:57 crc kubenswrapper[4703]: I0202 13:06:57.901929 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerDied","Data":"a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f"} Feb 02 13:06:58 crc kubenswrapper[4703]: I0202 13:06:58.913002 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerStarted","Data":"65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c"} Feb 02 13:06:58 crc kubenswrapper[4703]: I0202 13:06:58.939685 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-h94dn" podStartSLOduration=2.283915336 podStartE2EDuration="4.939667799s" podCreationTimestamp="2026-02-02 13:06:54 +0000 UTC" firstStartedPulling="2026-02-02 13:06:55.867206535 +0000 UTC m=+942.882414069" lastFinishedPulling="2026-02-02 13:06:58.522958998 +0000 UTC m=+945.538166532" observedRunningTime="2026-02-02 13:06:58.932616946 +0000 UTC m=+945.947824490" watchObservedRunningTime="2026-02-02 13:06:58.939667799 +0000 UTC m=+945.954875353" Feb 02 13:07:00 crc kubenswrapper[4703]: I0202 13:07:00.073648 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:07:00 crc kubenswrapper[4703]: I0202 13:07:00.157641 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:07:01 crc kubenswrapper[4703]: I0202 13:07:01.929160 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" event={"ID":"f3e89261-c5b4-429d-b84e-4a89fa66b98b","Type":"ContainerStarted","Data":"9a453d5555c17faa4dda9b09d374ce9c18449d8a777be0db00b321b90d77356e"} Feb 02 13:07:01 crc kubenswrapper[4703]: I0202 13:07:01.948831 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" podStartSLOduration=2.4981801949999998 podStartE2EDuration="5.948806701s" podCreationTimestamp="2026-02-02 13:06:56 +0000 UTC" firstStartedPulling="2026-02-02 13:06:57.491955309 +0000 UTC m=+944.507162843" lastFinishedPulling="2026-02-02 13:07:00.942581815 +0000 UTC m=+947.957789349" observedRunningTime="2026-02-02 13:07:01.942151969 +0000 UTC m=+948.957359523" watchObservedRunningTime="2026-02-02 13:07:01.948806701 +0000 UTC m=+948.964014235" Feb 02 13:07:02 crc kubenswrapper[4703]: I0202 
13:07:02.402242 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:07:02 crc kubenswrapper[4703]: I0202 13:07:02.402413 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:07:02 crc kubenswrapper[4703]: I0202 13:07:02.466533 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:07:02 crc kubenswrapper[4703]: I0202 13:07:02.976136 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-8kstc" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.685084 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-9wgnw"] Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.686682 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.709561 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9wgnw"] Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.800090 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-catalog-content\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.800156 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-utilities\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.800176 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dk4gl\" (UniqueName: \"kubernetes.io/projected/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-kube-api-access-dk4gl\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.902302 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-catalog-content\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.902372 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-utilities\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.902394 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dk4gl\" (UniqueName: \"kubernetes.io/projected/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-kube-api-access-dk4gl\") pod 
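[editor's note] The four probe transitions above show startup-probe gating on redhat-marketplace-8kstc: while the startup probe is unhealthy the readiness status is surfaced as "" (not yet evaluated), and readiness only runs, and goes ready, once startup has succeeded. The gate is roughly this shape; an illustrative model, not the prober's actual code:

def effective_readiness(startup_succeeded_once, readiness_probe_result):
    # Until the startup probe has succeeded at least once, the kubelet
    # does not evaluate the readiness probe and reports status "".
    if not startup_succeeded_once:
        return ""
    return "ready" if readiness_probe_result else "not ready"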
\"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.902854 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-catalog-content\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.902896 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-utilities\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:03 crc kubenswrapper[4703]: I0202 13:07:03.928016 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dk4gl\" (UniqueName: \"kubernetes.io/projected/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-kube-api-access-dk4gl\") pod \"certified-operators-9wgnw\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:04 crc kubenswrapper[4703]: I0202 13:07:04.005835 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:04 crc kubenswrapper[4703]: I0202 13:07:04.467929 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-9wgnw"] Feb 02 13:07:04 crc kubenswrapper[4703]: I0202 13:07:04.946336 4703 generic.go:334] "Generic (PLEG): container finished" podID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerID="78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54" exitCode=0 Feb 02 13:07:04 crc kubenswrapper[4703]: I0202 13:07:04.946433 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerDied","Data":"78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54"} Feb 02 13:07:04 crc kubenswrapper[4703]: I0202 13:07:04.946869 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerStarted","Data":"c0d66f2bb5baf3642f7c68da9fbf4fd729b8105c120993ff98e8c6cb01103282"} Feb 02 13:07:05 crc kubenswrapper[4703]: I0202 13:07:05.214939 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:07:05 crc kubenswrapper[4703]: I0202 13:07:05.214995 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:07:05 crc kubenswrapper[4703]: I0202 13:07:05.253473 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:07:05 crc kubenswrapper[4703]: I0202 13:07:05.956864 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerStarted","Data":"5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145"} Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.004364 4703 kubelet.go:2542] "SyncLoop 
(probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.382468 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.384398 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.390876 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"rabbitmq-server-conf" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.391151 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"rabbitmq-erlang-cookie" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.391668 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"rabbitmq-default-user" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.393635 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"manila-kuttl-tests"/"rabbitmq-plugins-conf" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.395363 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.396587 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"rabbitmq-server-dockercfg-d7zqp" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577262 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd5038e-d740-437c-8451-5e31dd2b2d10-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577405 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577448 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd5038e-d740-437c-8451-5e31dd2b2d10-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577651 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd5038e-d740-437c-8451-5e31dd2b2d10-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577711 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-h9hp6\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-kube-api-access-h9hp6\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577856 4703 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.577907 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.578008 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679504 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd5038e-d740-437c-8451-5e31dd2b2d10-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679546 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679564 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd5038e-d740-437c-8451-5e31dd2b2d10-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679588 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd5038e-d740-437c-8451-5e31dd2b2d10-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679609 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-h9hp6\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-kube-api-access-h9hp6\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679651 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679673 4703 reconciler_common.go:218] 
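[editor's note] rabbitmq-server-0 is the first pod in this window to mount the whole spread of volume plugins at once: secret, empty-dir, downward-api, configmap, projected, and a CSI-backed PVC. When auditing a pod's volume mix from lines like the ones above, a histogram over the VerifyControllerAttachedVolume entries is enough; the regex is fitted to these lines and illustrative only:

import re
from collections import Counter

# Plugin name sits between "kubernetes.io/" and the next '/' (or '^' for CSI).
PAT = re.compile(r'UniqueName: \\"kubernetes\.io/([a-z-]+)[/^]')

def plugin_histogram(lines):
    """Count volume plugin types across 'VerifyControllerAttachedVolume' lines."""
    hist = Counter()
    for line in lines:
        if "VerifyControllerAttachedVolume started" not in line:
            continue
        m = PAT.search(line)
        if m:
            hist[m.group(1)] += 1
    return hist
    # For rabbitmq-server-0: Counter({'empty-dir': 2, 'projected': 2,
    #   'secret': 1, 'downward-api': 1, 'configmap': 1, 'csi': 1})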
"operationExecutor.MountVolume started for volume \"pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.679701 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.681284 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd5038e-d740-437c-8451-5e31dd2b2d10-plugins-conf\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.681342 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-erlang-cookie\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.681755 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-plugins\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.683748 4703 csi_attacher.go:380] kubernetes.io/csi: attacher.MountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping MountDevice... 
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.683794 4703 operation_generator.go:580] "MountVolume.MountDevice succeeded for volume \"pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") device mount path \"/var/lib/kubelet/plugins/kubernetes.io/csi/kubevirt.io.hostpath-provisioner/f5543d749e2e8e3e39853be16aa4fe7a4e5733c5c90661f0a2c98fbc7d2bbd3d/globalmount\"" pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.685960 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd5038e-d740-437c-8451-5e31dd2b2d10-pod-info\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.686244 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd5038e-d740-437c-8451-5e31dd2b2d10-erlang-cookie-secret\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.692156 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-confd\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.710002 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-h9hp6\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-kube-api-access-h9hp6\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.716710 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") pod \"rabbitmq-server-0\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.759143 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.963928 4703 generic.go:334] "Generic (PLEG): container finished" podID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerID="5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145" exitCode=0
Feb 02 13:07:06 crc kubenswrapper[4703]: I0202 13:07:06.964011 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerDied","Data":"5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145"}
Feb 02 13:07:07 crc kubenswrapper[4703]: I0202 13:07:07.359468 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"]
Feb 02 13:07:07 crc kubenswrapper[4703]: I0202 13:07:07.972917 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"1cd5038e-d740-437c-8451-5e31dd2b2d10","Type":"ContainerStarted","Data":"28cec77f64946aee73adc51c1313afba8326ed75e495f658c119be4698610569"}
Feb 02 13:07:07 crc kubenswrapper[4703]: I0202 13:07:07.974947 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerStarted","Data":"5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173"}
Feb 02 13:07:07 crc kubenswrapper[4703]: I0202 13:07:07.992644 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-9wgnw" podStartSLOduration=2.525374361 podStartE2EDuration="4.992621189s" podCreationTimestamp="2026-02-02 13:07:03 +0000 UTC" firstStartedPulling="2026-02-02 13:07:04.94815187 +0000 UTC m=+951.963359404" lastFinishedPulling="2026-02-02 13:07:07.415398698 +0000 UTC m=+954.430606232" observedRunningTime="2026-02-02 13:07:07.991521047 +0000 UTC m=+955.006728601" watchObservedRunningTime="2026-02-02 13:07:07.992621189 +0000 UTC m=+955.007828723"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.474803 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-45sdh"]
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.476268 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.495519 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45sdh"]
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.605003 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-utilities\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.605203 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-catalog-content\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.605241 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-bjct4\" (UniqueName: \"kubernetes.io/projected/08d236b1-2949-49f6-86d9-f8e15020157f-kube-api-access-bjct4\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.707047 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-catalog-content\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.707099 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-bjct4\" (UniqueName: \"kubernetes.io/projected/08d236b1-2949-49f6-86d9-f8e15020157f-kube-api-access-bjct4\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.707161 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-utilities\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.707615 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-catalog-content\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.707675 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-utilities\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.729331 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-bjct4\" (UniqueName: \"kubernetes.io/projected/08d236b1-2949-49f6-86d9-f8e15020157f-kube-api-access-bjct4\") pod \"community-operators-45sdh\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.847197 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45sdh"
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.869923 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kstc"]
Feb 02 13:07:08 crc kubenswrapper[4703]: I0202 13:07:08.870184 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-8kstc" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="registry-server" containerID="cri-o://b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf" gracePeriod=2
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.330780 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-45sdh"]
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.354015 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kstc"
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.517836 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-catalog-content\") pod \"5b0812c1-d2be-4907-9559-35df7f0425fb\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") "
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.518102 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-utilities\") pod \"5b0812c1-d2be-4907-9559-35df7f0425fb\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") "
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.518165 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6hlkt\" (UniqueName: \"kubernetes.io/projected/5b0812c1-d2be-4907-9559-35df7f0425fb-kube-api-access-6hlkt\") pod \"5b0812c1-d2be-4907-9559-35df7f0425fb\" (UID: \"5b0812c1-d2be-4907-9559-35df7f0425fb\") "
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.519206 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-utilities" (OuterVolumeSpecName: "utilities") pod "5b0812c1-d2be-4907-9559-35df7f0425fb" (UID: "5b0812c1-d2be-4907-9559-35df7f0425fb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.525052 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5b0812c1-d2be-4907-9559-35df7f0425fb-kube-api-access-6hlkt" (OuterVolumeSpecName: "kube-api-access-6hlkt") pod "5b0812c1-d2be-4907-9559-35df7f0425fb" (UID: "5b0812c1-d2be-4907-9559-35df7f0425fb"). InnerVolumeSpecName "kube-api-access-6hlkt". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.550725 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5b0812c1-d2be-4907-9559-35df7f0425fb" (UID: "5b0812c1-d2be-4907-9559-35df7f0425fb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.619715 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.619750 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6hlkt\" (UniqueName: \"kubernetes.io/projected/5b0812c1-d2be-4907-9559-35df7f0425fb-kube-api-access-6hlkt\") on node \"crc\" DevicePath \"\""
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.619762 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5b0812c1-d2be-4907-9559-35df7f0425fb-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.993864 4703 generic.go:334] "Generic (PLEG): container finished" podID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerID="b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf" exitCode=0
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.993948 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerDied","Data":"b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf"}
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.993995 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-8kstc" event={"ID":"5b0812c1-d2be-4907-9559-35df7f0425fb","Type":"ContainerDied","Data":"c6eeee34fd0a5177b4f3c3006abd7b86667a1a9954d74486a7e8c99254879f30"}
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.994013 4703 scope.go:117] "RemoveContainer" containerID="b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf"
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.994210 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-8kstc"
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.997306 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45sdh" event={"ID":"08d236b1-2949-49f6-86d9-f8e15020157f","Type":"ContainerDied","Data":"122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04"}
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.997342 4703 generic.go:334] "Generic (PLEG): container finished" podID="08d236b1-2949-49f6-86d9-f8e15020157f" containerID="122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04" exitCode=0
Feb 02 13:07:09 crc kubenswrapper[4703]: I0202 13:07:09.997400 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45sdh" event={"ID":"08d236b1-2949-49f6-86d9-f8e15020157f","Type":"ContainerStarted","Data":"d13c861a752e3937bf61383bdc23f5dd4b7263607a966b1a07d9617bb0be9935"}
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.019522 4703 scope.go:117] "RemoveContainer" containerID="9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63"
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.029076 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kstc"]
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.038656 4703 scope.go:117] "RemoveContainer" containerID="dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63"
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.042517 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-8kstc"]
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.065774 4703 scope.go:117] "RemoveContainer" containerID="b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf"
Feb 02 13:07:10 crc kubenswrapper[4703]: E0202 13:07:10.068325 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf\": container with ID starting with b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf not found: ID does not exist" containerID="b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf"
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.068598 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf"} err="failed to get container status \"b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf\": rpc error: code = NotFound desc = could not find container \"b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf\": container with ID starting with b853a29e53b54f01c0f50a7b668a1d66867022bbbad81134d7a4f036631326cf not found: ID does not exist"
Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.068851 4703 scope.go:117] "RemoveContainer" containerID="9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63"
Feb 02 13:07:10 crc kubenswrapper[4703]: E0202 13:07:10.069344 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63\": container with ID starting with 9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63 not found: ID does not exist"
containerID="9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63" Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.069443 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63"} err="failed to get container status \"9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63\": rpc error: code = NotFound desc = could not find container \"9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63\": container with ID starting with 9987eb7c6078663a983c5e02f32b3fe7a23e3fa3a07c69d897dcb8064a5d7b63 not found: ID does not exist" Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.069521 4703 scope.go:117] "RemoveContainer" containerID="dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63" Feb 02 13:07:10 crc kubenswrapper[4703]: E0202 13:07:10.070297 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63\": container with ID starting with dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63 not found: ID does not exist" containerID="dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63" Feb 02 13:07:10 crc kubenswrapper[4703]: I0202 13:07:10.070336 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63"} err="failed to get container status \"dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63\": rpc error: code = NotFound desc = could not find container \"dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63\": container with ID starting with dca8f6cc3ca014f40981aefb3cf28a34525c86fb81d3cdf81a4a9b67e1383a63 not found: ID does not exist" Feb 02 13:07:11 crc kubenswrapper[4703]: I0202 13:07:11.944773 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" path="/var/lib/kubelet/pods/5b0812c1-d2be-4907-9559-35df7f0425fb/volumes" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.879631 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/keystone-operator-index-7v4d4"] Feb 02 13:07:12 crc kubenswrapper[4703]: E0202 13:07:12.880386 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="extract-content" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.880454 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="extract-content" Feb 02 13:07:12 crc kubenswrapper[4703]: E0202 13:07:12.880479 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="registry-server" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.880487 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="registry-server" Feb 02 13:07:12 crc kubenswrapper[4703]: E0202 13:07:12.880504 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="extract-utilities" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.880511 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="extract-utilities" Feb 02 13:07:12 crc 
kubenswrapper[4703]: I0202 13:07:12.880761 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="5b0812c1-d2be-4907-9559-35df7f0425fb" containerName="registry-server" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.881349 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.886401 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-index-dockercfg-8vvll" Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.887523 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-7v4d4"] Feb 02 13:07:12 crc kubenswrapper[4703]: I0202 13:07:12.968515 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4tvm4\" (UniqueName: \"kubernetes.io/projected/3b5301e6-77c6-4fd1-a97c-8ec7be30d794-kube-api-access-4tvm4\") pod \"keystone-operator-index-7v4d4\" (UID: \"3b5301e6-77c6-4fd1-a97c-8ec7be30d794\") " pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:13 crc kubenswrapper[4703]: I0202 13:07:13.071091 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4tvm4\" (UniqueName: \"kubernetes.io/projected/3b5301e6-77c6-4fd1-a97c-8ec7be30d794-kube-api-access-4tvm4\") pod \"keystone-operator-index-7v4d4\" (UID: \"3b5301e6-77c6-4fd1-a97c-8ec7be30d794\") " pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:13 crc kubenswrapper[4703]: I0202 13:07:13.093898 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4tvm4\" (UniqueName: \"kubernetes.io/projected/3b5301e6-77c6-4fd1-a97c-8ec7be30d794-kube-api-access-4tvm4\") pod \"keystone-operator-index-7v4d4\" (UID: \"3b5301e6-77c6-4fd1-a97c-8ec7be30d794\") " pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:13 crc kubenswrapper[4703]: I0202 13:07:13.209083 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:14 crc kubenswrapper[4703]: I0202 13:07:14.006773 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:14 crc kubenswrapper[4703]: I0202 13:07:14.007919 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:14 crc kubenswrapper[4703]: I0202 13:07:14.052814 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:14 crc kubenswrapper[4703]: I0202 13:07:14.099165 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:14 crc kubenswrapper[4703]: I0202 13:07:14.815249 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-index-7v4d4"] Feb 02 13:07:14 crc kubenswrapper[4703]: W0202 13:07:14.824058 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3b5301e6_77c6_4fd1_a97c_8ec7be30d794.slice/crio-c3e0a0fdeb8249bb57ebb28973662325900338ff3c283ad346d632d69c17c9ef WatchSource:0}: Error finding container c3e0a0fdeb8249bb57ebb28973662325900338ff3c283ad346d632d69c17c9ef: Status 404 returned error can't find the container with id c3e0a0fdeb8249bb57ebb28973662325900338ff3c283ad346d632d69c17c9ef Feb 02 13:07:15 crc kubenswrapper[4703]: I0202 13:07:15.030055 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-7v4d4" event={"ID":"3b5301e6-77c6-4fd1-a97c-8ec7be30d794","Type":"ContainerStarted","Data":"c3e0a0fdeb8249bb57ebb28973662325900338ff3c283ad346d632d69c17c9ef"} Feb 02 13:07:15 crc kubenswrapper[4703]: I0202 13:07:15.032727 4703 generic.go:334] "Generic (PLEG): container finished" podID="08d236b1-2949-49f6-86d9-f8e15020157f" containerID="d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476" exitCode=0 Feb 02 13:07:15 crc kubenswrapper[4703]: I0202 13:07:15.033221 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45sdh" event={"ID":"08d236b1-2949-49f6-86d9-f8e15020157f","Type":"ContainerDied","Data":"d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476"} Feb 02 13:07:16 crc kubenswrapper[4703]: I0202 13:07:16.040655 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-7v4d4" event={"ID":"3b5301e6-77c6-4fd1-a97c-8ec7be30d794","Type":"ContainerStarted","Data":"d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a"} Feb 02 13:07:16 crc kubenswrapper[4703]: I0202 13:07:16.043196 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45sdh" event={"ID":"08d236b1-2949-49f6-86d9-f8e15020157f","Type":"ContainerStarted","Data":"59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70"} Feb 02 13:07:16 crc kubenswrapper[4703]: I0202 13:07:16.044928 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"1cd5038e-d740-437c-8451-5e31dd2b2d10","Type":"ContainerStarted","Data":"268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c"} Feb 02 13:07:16 crc kubenswrapper[4703]: I0202 13:07:16.059678 4703 pod_startup_latency_tracker.go:104] "Observed pod startup 
duration" pod="openstack-operators/keystone-operator-index-7v4d4" podStartSLOduration=3.157629591 podStartE2EDuration="4.059662028s" podCreationTimestamp="2026-02-02 13:07:12 +0000 UTC" firstStartedPulling="2026-02-02 13:07:14.826762589 +0000 UTC m=+961.841970123" lastFinishedPulling="2026-02-02 13:07:15.728795026 +0000 UTC m=+962.744002560" observedRunningTime="2026-02-02 13:07:16.053305205 +0000 UTC m=+963.068512759" watchObservedRunningTime="2026-02-02 13:07:16.059662028 +0000 UTC m=+963.074869562" Feb 02 13:07:16 crc kubenswrapper[4703]: I0202 13:07:16.099295 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-45sdh" podStartSLOduration=2.372028348 podStartE2EDuration="8.099257417s" podCreationTimestamp="2026-02-02 13:07:08 +0000 UTC" firstStartedPulling="2026-02-02 13:07:10.001373512 +0000 UTC m=+957.016581046" lastFinishedPulling="2026-02-02 13:07:15.728602581 +0000 UTC m=+962.743810115" observedRunningTime="2026-02-02 13:07:16.094349426 +0000 UTC m=+963.109556960" watchObservedRunningTime="2026-02-02 13:07:16.099257417 +0000 UTC m=+963.114464951" Feb 02 13:07:17 crc kubenswrapper[4703]: I0202 13:07:17.489856 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h94dn"] Feb 02 13:07:17 crc kubenswrapper[4703]: I0202 13:07:17.490374 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-h94dn" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="registry-server" containerID="cri-o://65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c" gracePeriod=2 Feb 02 13:07:17 crc kubenswrapper[4703]: I0202 13:07:17.943147 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.043144 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-utilities\") pod \"10075e21-6f03-425d-afe1-18896de4c242\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.043199 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h7j26\" (UniqueName: \"kubernetes.io/projected/10075e21-6f03-425d-afe1-18896de4c242-kube-api-access-h7j26\") pod \"10075e21-6f03-425d-afe1-18896de4c242\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.043224 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-catalog-content\") pod \"10075e21-6f03-425d-afe1-18896de4c242\" (UID: \"10075e21-6f03-425d-afe1-18896de4c242\") " Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.044551 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-utilities" (OuterVolumeSpecName: "utilities") pod "10075e21-6f03-425d-afe1-18896de4c242" (UID: "10075e21-6f03-425d-afe1-18896de4c242"). InnerVolumeSpecName "utilities". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.049783 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/10075e21-6f03-425d-afe1-18896de4c242-kube-api-access-h7j26" (OuterVolumeSpecName: "kube-api-access-h7j26") pod "10075e21-6f03-425d-afe1-18896de4c242" (UID: "10075e21-6f03-425d-afe1-18896de4c242"). InnerVolumeSpecName "kube-api-access-h7j26". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.075058 4703 generic.go:334] "Generic (PLEG): container finished" podID="10075e21-6f03-425d-afe1-18896de4c242" containerID="65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c" exitCode=0 Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.075120 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerDied","Data":"65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c"} Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.075154 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-h94dn" event={"ID":"10075e21-6f03-425d-afe1-18896de4c242","Type":"ContainerDied","Data":"b996ae85afd8db39b24aef2490cc449062a6e16fb07ef884217cb12c2f624a39"} Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.075174 4703 scope.go:117] "RemoveContainer" containerID="65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.075322 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-h94dn" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.095917 4703 scope.go:117] "RemoveContainer" containerID="a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.113879 4703 scope.go:117] "RemoveContainer" containerID="8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.146438 4703 scope.go:117] "RemoveContainer" containerID="65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.147187 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.147208 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h7j26\" (UniqueName: \"kubernetes.io/projected/10075e21-6f03-425d-afe1-18896de4c242-kube-api-access-h7j26\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:18 crc kubenswrapper[4703]: E0202 13:07:18.148652 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c\": container with ID starting with 65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c not found: ID does not exist" containerID="65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.148697 4703 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c"} err="failed to get container status \"65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c\": rpc error: code = NotFound desc = could not find container \"65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c\": container with ID starting with 65744d4a0a62a39f0edcac20e226a634697b28719b1033ac541ada58f5d1eb2c not found: ID does not exist" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.148721 4703 scope.go:117] "RemoveContainer" containerID="a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f" Feb 02 13:07:18 crc kubenswrapper[4703]: E0202 13:07:18.149033 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f\": container with ID starting with a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f not found: ID does not exist" containerID="a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.149056 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f"} err="failed to get container status \"a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f\": rpc error: code = NotFound desc = could not find container \"a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f\": container with ID starting with a6b8cae83960ef43f2f361d8878cde704543b9b40d6b1a01ebf62327192dc06f not found: ID does not exist" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.149068 4703 scope.go:117] "RemoveContainer" containerID="8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82" Feb 02 13:07:18 crc kubenswrapper[4703]: E0202 13:07:18.150724 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82\": container with ID starting with 8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82 not found: ID does not exist" containerID="8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.150766 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82"} err="failed to get container status \"8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82\": rpc error: code = NotFound desc = could not find container \"8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82\": container with ID starting with 8b8bdce422794934952a91c1c296912bf10714cc4ebf87392f6266072a603b82 not found: ID does not exist" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.169693 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "10075e21-6f03-425d-afe1-18896de4c242" (UID: "10075e21-6f03-425d-afe1-18896de4c242"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.248343 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/10075e21-6f03-425d-afe1-18896de4c242-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.413753 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-h94dn"] Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.421391 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-h94dn"] Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.847655 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-45sdh" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.847718 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-45sdh" Feb 02 13:07:18 crc kubenswrapper[4703]: I0202 13:07:18.895383 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-45sdh" Feb 02 13:07:19 crc kubenswrapper[4703]: I0202 13:07:19.941266 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="10075e21-6f03-425d-afe1-18896de4c242" path="/var/lib/kubelet/pods/10075e21-6f03-425d-afe1-18896de4c242/volumes" Feb 02 13:07:23 crc kubenswrapper[4703]: I0202 13:07:23.209606 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:23 crc kubenswrapper[4703]: I0202 13:07:23.209675 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:23 crc kubenswrapper[4703]: I0202 13:07:23.238105 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.141560 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.268344 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9wgnw"] Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.269258 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-9wgnw" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="registry-server" containerID="cri-o://5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173" gracePeriod=2 Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.712380 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.835354 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-utilities\") pod \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.835439 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-catalog-content\") pod \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.835546 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dk4gl\" (UniqueName: \"kubernetes.io/projected/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-kube-api-access-dk4gl\") pod \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\" (UID: \"1302e5cf-bb3a-47d9-84fd-488cfc19acc3\") " Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.836641 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-utilities" (OuterVolumeSpecName: "utilities") pod "1302e5cf-bb3a-47d9-84fd-488cfc19acc3" (UID: "1302e5cf-bb3a-47d9-84fd-488cfc19acc3"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.850446 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-kube-api-access-dk4gl" (OuterVolumeSpecName: "kube-api-access-dk4gl") pod "1302e5cf-bb3a-47d9-84fd-488cfc19acc3" (UID: "1302e5cf-bb3a-47d9-84fd-488cfc19acc3"). InnerVolumeSpecName "kube-api-access-dk4gl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.888292 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "1302e5cf-bb3a-47d9-84fd-488cfc19acc3" (UID: "1302e5cf-bb3a-47d9-84fd-488cfc19acc3"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.937415 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.937462 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:24 crc kubenswrapper[4703]: I0202 13:07:24.937483 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dk4gl\" (UniqueName: \"kubernetes.io/projected/1302e5cf-bb3a-47d9-84fd-488cfc19acc3-kube-api-access-dk4gl\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.117895 4703 generic.go:334] "Generic (PLEG): container finished" podID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerID="5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173" exitCode=0 Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.118214 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-9wgnw" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.118110 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerDied","Data":"5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173"} Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.118326 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-9wgnw" event={"ID":"1302e5cf-bb3a-47d9-84fd-488cfc19acc3","Type":"ContainerDied","Data":"c0d66f2bb5baf3642f7c68da9fbf4fd729b8105c120993ff98e8c6cb01103282"} Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.118349 4703 scope.go:117] "RemoveContainer" containerID="5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.139903 4703 scope.go:117] "RemoveContainer" containerID="5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.151240 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-9wgnw"] Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.164020 4703 scope.go:117] "RemoveContainer" containerID="78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.166037 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-9wgnw"] Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.193608 4703 scope.go:117] "RemoveContainer" containerID="5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173" Feb 02 13:07:25 crc kubenswrapper[4703]: E0202 13:07:25.194048 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173\": container with ID starting with 5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173 not found: ID does not exist" containerID="5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.194088 
4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173"} err="failed to get container status \"5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173\": rpc error: code = NotFound desc = could not find container \"5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173\": container with ID starting with 5e1d7dee2a8316e7ed2f2f3ef31627c9138dcde505bc0a2e3c4afcc9266b6173 not found: ID does not exist" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.194115 4703 scope.go:117] "RemoveContainer" containerID="5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145" Feb 02 13:07:25 crc kubenswrapper[4703]: E0202 13:07:25.194551 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145\": container with ID starting with 5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145 not found: ID does not exist" containerID="5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.194607 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145"} err="failed to get container status \"5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145\": rpc error: code = NotFound desc = could not find container \"5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145\": container with ID starting with 5d543016c70ab8b6249501556ee2d96c5bda5f5e6dbc001362fa805e6053c145 not found: ID does not exist" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.194643 4703 scope.go:117] "RemoveContainer" containerID="78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54" Feb 02 13:07:25 crc kubenswrapper[4703]: E0202 13:07:25.194971 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54\": container with ID starting with 78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54 not found: ID does not exist" containerID="78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.195005 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54"} err="failed to get container status \"78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54\": rpc error: code = NotFound desc = could not find container \"78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54\": container with ID starting with 78a81a3e943750597d3269d8d2087513d103ff877d3734ad802e550548b8cd54 not found: ID does not exist" Feb 02 13:07:25 crc kubenswrapper[4703]: I0202 13:07:25.941330 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" path="/var/lib/kubelet/pods/1302e5cf-bb3a-47d9-84fd-488cfc19acc3/volumes" Feb 02 13:07:28 crc kubenswrapper[4703]: I0202 13:07:28.889951 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-45sdh" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.317184 4703 kubelet.go:2421] "SyncLoop ADD" 
source="api" pods=["openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx"] Feb 02 13:07:29 crc kubenswrapper[4703]: E0202 13:07:29.317714 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="registry-server" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.317798 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="registry-server" Feb 02 13:07:29 crc kubenswrapper[4703]: E0202 13:07:29.317864 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="registry-server" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.317923 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="registry-server" Feb 02 13:07:29 crc kubenswrapper[4703]: E0202 13:07:29.317992 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="extract-utilities" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.318046 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="extract-utilities" Feb 02 13:07:29 crc kubenswrapper[4703]: E0202 13:07:29.318117 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="extract-content" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.318198 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="extract-content" Feb 02 13:07:29 crc kubenswrapper[4703]: E0202 13:07:29.318301 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="extract-content" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.318378 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="extract-content" Feb 02 13:07:29 crc kubenswrapper[4703]: E0202 13:07:29.318447 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="extract-utilities" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.318507 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="extract-utilities" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.318676 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="10075e21-6f03-425d-afe1-18896de4c242" containerName="registry-server" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.318748 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="1302e5cf-bb3a-47d9-84fd-488cfc19acc3" containerName="registry-server" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.319752 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.321665 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t9m2k" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.329144 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx"] Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.397553 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-bundle\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.397812 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-util\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.397868 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wq7dn\" (UniqueName: \"kubernetes.io/projected/949ed635-1fc0-4037-8d5a-08817761db87-kube-api-access-wq7dn\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.498874 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-util\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.499172 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wq7dn\" (UniqueName: \"kubernetes.io/projected/949ed635-1fc0-4037-8d5a-08817761db87-kube-api-access-wq7dn\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.499341 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-bundle\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.499453 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: 
\"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-util\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.499607 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-bundle\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.516201 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wq7dn\" (UniqueName: \"kubernetes.io/projected/949ed635-1fc0-4037-8d5a-08817761db87-kube-api-access-wq7dn\") pod \"b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:29 crc kubenswrapper[4703]: I0202 13:07:29.635232 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:30 crc kubenswrapper[4703]: I0202 13:07:30.031314 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx"] Feb 02 13:07:30 crc kubenswrapper[4703]: W0202 13:07:30.038879 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod949ed635_1fc0_4037_8d5a_08817761db87.slice/crio-f72248a1185540a159a497e86087e9162b4d37619a4b2f790fafd375e064824b WatchSource:0}: Error finding container f72248a1185540a159a497e86087e9162b4d37619a4b2f790fafd375e064824b: Status 404 returned error can't find the container with id f72248a1185540a159a497e86087e9162b4d37619a4b2f790fafd375e064824b Feb 02 13:07:30 crc kubenswrapper[4703]: I0202 13:07:30.162691 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" event={"ID":"949ed635-1fc0-4037-8d5a-08817761db87","Type":"ContainerStarted","Data":"f72248a1185540a159a497e86087e9162b4d37619a4b2f790fafd375e064824b"} Feb 02 13:07:31 crc kubenswrapper[4703]: I0202 13:07:31.170512 4703 generic.go:334] "Generic (PLEG): container finished" podID="949ed635-1fc0-4037-8d5a-08817761db87" containerID="9d602e76c9ca247363d0d2fc25b34c3e39ebb551fbcd3dee8aff89a61a915ade" exitCode=0 Feb 02 13:07:31 crc kubenswrapper[4703]: I0202 13:07:31.170560 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" event={"ID":"949ed635-1fc0-4037-8d5a-08817761db87","Type":"ContainerDied","Data":"9d602e76c9ca247363d0d2fc25b34c3e39ebb551fbcd3dee8aff89a61a915ade"} Feb 02 13:07:32 crc kubenswrapper[4703]: I0202 13:07:32.177255 4703 generic.go:334] "Generic (PLEG): container finished" podID="949ed635-1fc0-4037-8d5a-08817761db87" containerID="01fce3c0cf9338fd2d77e1883768042f963b508cd429449d46f5cf89f2eb117c" exitCode=0 Feb 02 13:07:32 crc kubenswrapper[4703]: I0202 13:07:32.177385 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" event={"ID":"949ed635-1fc0-4037-8d5a-08817761db87","Type":"ContainerDied","Data":"01fce3c0cf9338fd2d77e1883768042f963b508cd429449d46f5cf89f2eb117c"} Feb 02 13:07:33 crc kubenswrapper[4703]: I0202 13:07:33.186582 4703 generic.go:334] "Generic (PLEG): container finished" podID="949ed635-1fc0-4037-8d5a-08817761db87" containerID="d3c04ad42a5a136a93926e54837b8bb20dc899fbfcc2f129a64a4b7b8eadaa49" exitCode=0 Feb 02 13:07:33 crc kubenswrapper[4703]: I0202 13:07:33.186629 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" event={"ID":"949ed635-1fc0-4037-8d5a-08817761db87","Type":"ContainerDied","Data":"d3c04ad42a5a136a93926e54837b8bb20dc899fbfcc2f129a64a4b7b8eadaa49"} Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.454522 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.576467 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wq7dn\" (UniqueName: \"kubernetes.io/projected/949ed635-1fc0-4037-8d5a-08817761db87-kube-api-access-wq7dn\") pod \"949ed635-1fc0-4037-8d5a-08817761db87\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.576685 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-bundle\") pod \"949ed635-1fc0-4037-8d5a-08817761db87\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.576712 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-util\") pod \"949ed635-1fc0-4037-8d5a-08817761db87\" (UID: \"949ed635-1fc0-4037-8d5a-08817761db87\") " Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.577847 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-bundle" (OuterVolumeSpecName: "bundle") pod "949ed635-1fc0-4037-8d5a-08817761db87" (UID: "949ed635-1fc0-4037-8d5a-08817761db87"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.582546 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/949ed635-1fc0-4037-8d5a-08817761db87-kube-api-access-wq7dn" (OuterVolumeSpecName: "kube-api-access-wq7dn") pod "949ed635-1fc0-4037-8d5a-08817761db87" (UID: "949ed635-1fc0-4037-8d5a-08817761db87"). InnerVolumeSpecName "kube-api-access-wq7dn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.589741 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-util" (OuterVolumeSpecName: "util") pod "949ed635-1fc0-4037-8d5a-08817761db87" (UID: "949ed635-1fc0-4037-8d5a-08817761db87"). InnerVolumeSpecName "util". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.678538 4703 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.678577 4703 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/949ed635-1fc0-4037-8d5a-08817761db87-util\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:34 crc kubenswrapper[4703]: I0202 13:07:34.678586 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wq7dn\" (UniqueName: \"kubernetes.io/projected/949ed635-1fc0-4037-8d5a-08817761db87-kube-api-access-wq7dn\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:35 crc kubenswrapper[4703]: I0202 13:07:35.200134 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" event={"ID":"949ed635-1fc0-4037-8d5a-08817761db87","Type":"ContainerDied","Data":"f72248a1185540a159a497e86087e9162b4d37619a4b2f790fafd375e064824b"} Feb 02 13:07:35 crc kubenswrapper[4703]: I0202 13:07:35.200175 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx" Feb 02 13:07:35 crc kubenswrapper[4703]: I0202 13:07:35.200177 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f72248a1185540a159a497e86087e9162b4d37619a4b2f790fafd375e064824b" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.271476 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45sdh"] Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.272242 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-45sdh" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" containerName="registry-server" containerID="cri-o://59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70" gracePeriod=2 Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.691992 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-45sdh" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.747116 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-utilities\") pod \"08d236b1-2949-49f6-86d9-f8e15020157f\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.747208 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-bjct4\" (UniqueName: \"kubernetes.io/projected/08d236b1-2949-49f6-86d9-f8e15020157f-kube-api-access-bjct4\") pod \"08d236b1-2949-49f6-86d9-f8e15020157f\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.747253 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-catalog-content\") pod \"08d236b1-2949-49f6-86d9-f8e15020157f\" (UID: \"08d236b1-2949-49f6-86d9-f8e15020157f\") " Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.748553 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-utilities" (OuterVolumeSpecName: "utilities") pod "08d236b1-2949-49f6-86d9-f8e15020157f" (UID: "08d236b1-2949-49f6-86d9-f8e15020157f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.756289 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/08d236b1-2949-49f6-86d9-f8e15020157f-kube-api-access-bjct4" (OuterVolumeSpecName: "kube-api-access-bjct4") pod "08d236b1-2949-49f6-86d9-f8e15020157f" (UID: "08d236b1-2949-49f6-86d9-f8e15020157f"). InnerVolumeSpecName "kube-api-access-bjct4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.806333 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "08d236b1-2949-49f6-86d9-f8e15020157f" (UID: "08d236b1-2949-49f6-86d9-f8e15020157f"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.849121 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-bjct4\" (UniqueName: \"kubernetes.io/projected/08d236b1-2949-49f6-86d9-f8e15020157f-kube-api-access-bjct4\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.849173 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:39 crc kubenswrapper[4703]: I0202 13:07:39.849184 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/08d236b1-2949-49f6-86d9-f8e15020157f-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.237866 4703 generic.go:334] "Generic (PLEG): container finished" podID="08d236b1-2949-49f6-86d9-f8e15020157f" containerID="59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70" exitCode=0 Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.237916 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45sdh" event={"ID":"08d236b1-2949-49f6-86d9-f8e15020157f","Type":"ContainerDied","Data":"59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70"} Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.237944 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-45sdh" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.237973 4703 scope.go:117] "RemoveContainer" containerID="59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.237958 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-45sdh" event={"ID":"08d236b1-2949-49f6-86d9-f8e15020157f","Type":"ContainerDied","Data":"d13c861a752e3937bf61383bdc23f5dd4b7263607a966b1a07d9617bb0be9935"} Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.257606 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-45sdh"] Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.260764 4703 scope.go:117] "RemoveContainer" containerID="d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.277265 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-45sdh"] Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.287425 4703 scope.go:117] "RemoveContainer" containerID="122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.305355 4703 scope.go:117] "RemoveContainer" containerID="59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70" Feb 02 13:07:40 crc kubenswrapper[4703]: E0202 13:07:40.305860 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70\": container with ID starting with 59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70 not found: ID does not exist" containerID="59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.305921 
4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70"} err="failed to get container status \"59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70\": rpc error: code = NotFound desc = could not find container \"59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70\": container with ID starting with 59526df6b1eea8f1be1d217a6369d864c37295abfa393686b1709e3013d1bc70 not found: ID does not exist" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.305960 4703 scope.go:117] "RemoveContainer" containerID="d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476" Feb 02 13:07:40 crc kubenswrapper[4703]: E0202 13:07:40.306580 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476\": container with ID starting with d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476 not found: ID does not exist" containerID="d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.306665 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476"} err="failed to get container status \"d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476\": rpc error: code = NotFound desc = could not find container \"d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476\": container with ID starting with d407ff4140e6a6948516b1d37710ce05c414ae735a9585109a77f9c3aa88b476 not found: ID does not exist" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.306740 4703 scope.go:117] "RemoveContainer" containerID="122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04" Feb 02 13:07:40 crc kubenswrapper[4703]: E0202 13:07:40.307185 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04\": container with ID starting with 122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04 not found: ID does not exist" containerID="122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04" Feb 02 13:07:40 crc kubenswrapper[4703]: I0202 13:07:40.307222 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04"} err="failed to get container status \"122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04\": rpc error: code = NotFound desc = could not find container \"122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04\": container with ID starting with 122d54d1e066155779511b035c4e179b91d1da0b47a52c883c8c3e28c6616b04 not found: ID does not exist" Feb 02 13:07:41 crc kubenswrapper[4703]: I0202 13:07:41.943259 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" path="/var/lib/kubelet/pods/08d236b1-2949-49f6-86d9-f8e15020157f/volumes" Feb 02 13:07:45 crc kubenswrapper[4703]: I0202 13:07:45.984939 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
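The RemoveContainer / NotFound sequence above is benign: by the time kubelet asks CRI-O for the status of containers 59526df6..., d407ff41... and 122d54d1..., they are already gone, and deletion treats "not found" as done. A sketch of that idempotent-removal pattern (assumed helper, not kubelet source; uses the grpc status/codes packages):

```go
// Sketch only (assumed helper, not kubelet source): treat a gRPC NotFound
// from the runtime as success when removing a container, which is why the
// "DeleteContainer returned error ... ID does not exist" lines above are
// harmless during pod cleanup.
package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

func removeContainer(id string, remove func(string) error) error {
	if err := remove(id); err != nil && status.Code(err) != codes.NotFound {
		return err // a real failure
	}
	return nil // removed, or already gone: removal is idempotent
}

func main() {
	alreadyGone := func(id string) error {
		return status.Errorf(codes.NotFound, "could not find container %q", id)
	}
	fmt.Println(removeContainer("59526df6b1ee", alreadyGone)) // <nil>
}
```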
Feb 02 13:07:45 crc kubenswrapper[4703]: I0202 13:07:45.984939 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 13:07:45 crc kubenswrapper[4703]: I0202 13:07:45.985349 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 13:07:47 crc kubenswrapper[4703]: I0202 13:07:47.292178 4703 generic.go:334] "Generic (PLEG): container finished" podID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerID="268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c" exitCode=0
Feb 02 13:07:47 crc kubenswrapper[4703]: I0202 13:07:47.292249 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"1cd5038e-d740-437c-8451-5e31dd2b2d10","Type":"ContainerDied","Data":"268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c"}
Feb 02 13:07:48 crc kubenswrapper[4703]: I0202 13:07:48.300263 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"1cd5038e-d740-437c-8451-5e31dd2b2d10","Type":"ContainerStarted","Data":"9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99"}
Feb 02 13:07:48 crc kubenswrapper[4703]: I0202 13:07:48.300845 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/rabbitmq-server-0"
Feb 02 13:07:48 crc kubenswrapper[4703]: I0202 13:07:48.323132 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/rabbitmq-server-0" podStartSLOduration=36.182599357 podStartE2EDuration="43.323114674s" podCreationTimestamp="2026-02-02 13:07:05 +0000 UTC" firstStartedPulling="2026-02-02 13:07:07.376378705 +0000 UTC m=+954.391586239" lastFinishedPulling="2026-02-02 13:07:14.516894022 +0000 UTC m=+961.532101556" observedRunningTime="2026-02-02 13:07:48.321160289 +0000 UTC m=+995.336367823" watchObservedRunningTime="2026-02-02 13:07:48.323114674 +0000 UTC m=+995.338322208"
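The tracker line above encodes a simple relationship that its own numbers confirm: podStartE2EDuration is watchObservedRunningTime minus podCreationTimestamp (43.323114674s), and podStartSLOduration is that E2E time minus the image-pull window lastFinishedPulling - firstStartedPulling (43.323114674 - 7.140515317 = 36.182599357). A quick check with the timestamps copied from the log:

```go
// Quick arithmetic check of the rabbitmq-server-0 tracker line above, using
// the timestamps printed there. The layout string is an assumption that fits
// the "+0000 UTC" form in the log (the m=+... monotonic suffix is dropped).
package main

import (
	"fmt"
	"time"
)

const layout = "2006-01-02 15:04:05 -0700 MST"

func mustParse(s string) time.Time {
	t, err := time.Parse(layout, s)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := mustParse("2026-02-02 13:07:05 +0000 UTC")
	firstPull := mustParse("2026-02-02 13:07:07.376378705 +0000 UTC")
	lastPull := mustParse("2026-02-02 13:07:14.516894022 +0000 UTC")
	running := mustParse("2026-02-02 13:07:48.323114674 +0000 UTC") // watchObservedRunningTime

	e2e := running.Sub(created)          // podStartE2EDuration
	slo := e2e - lastPull.Sub(firstPull) // E2E minus the image-pull window
	fmt.Println(e2e)                     // 43.323114674s
	fmt.Println(slo)                     // 36.182599357s
}
```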
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="949ed635-1fc0-4037-8d5a-08817761db87" containerName="util" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.652080 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="949ed635-1fc0-4037-8d5a-08817761db87" containerName="util" Feb 02 13:07:51 crc kubenswrapper[4703]: E0202 13:07:51.652090 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" containerName="registry-server" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.652097 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" containerName="registry-server" Feb 02 13:07:51 crc kubenswrapper[4703]: E0202 13:07:51.652113 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" containerName="extract-utilities" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.652120 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" containerName="extract-utilities" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.652257 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="949ed635-1fc0-4037-8d5a-08817761db87" containerName="extract" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.652290 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="08d236b1-2949-49f6-86d9-f8e15020157f" containerName="registry-server" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.652869 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.654636 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-dockercfg-k6dl5" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.655055 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"keystone-operator-controller-manager-service-cert" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.667143 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6"] Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.803480 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-apiservice-cert\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.803534 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dwc7m\" (UniqueName: \"kubernetes.io/projected/86515156-fe01-412d-a10d-16ba26cfb8f8-kube-api-access-dwc7m\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.803584 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-webhook-cert\") pod 
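The cpu_manager / state_mem / memory_manager burst above is admission-time housekeeping: before the new keystone-operator pod is admitted, the resource managers drop per-container assignments left behind by pods that no longer exist (the deleted catalog pod 08d236b1... and the earlier 949ed635...). A toy sketch of that reconciliation (assumed types, not kubelet's):

```go
// Illustrative sketch (assumed types, not kubelet's): drop resource
// assignments whose pod is no longer active -- the pattern behind the
// "RemoveStaleState: removing container" / "Deleted CPUSet assignment"
// pairs above.
package main

import "fmt"

type key struct{ podUID, containerName string }

func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments { // deleting during range is safe in Go
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n", k.podUID, k.containerName)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"08d236b1", "registry-server"}: "cpuset 0-1", // pod was deleted
		{"1cd5038e", "rabbitmq"}:        "cpuset 2-3", // pod still running
	}
	removeStaleState(assignments, map[string]bool{"1cd5038e": true})
	fmt.Println(len(assignments), "assignment(s) left") // 1 assignment(s) left
}
```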
\"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.905589 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-apiservice-cert\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.905638 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dwc7m\" (UniqueName: \"kubernetes.io/projected/86515156-fe01-412d-a10d-16ba26cfb8f8-kube-api-access-dwc7m\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.905685 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-webhook-cert\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.911162 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-apiservice-cert\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.911370 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-webhook-cert\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.924985 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dwc7m\" (UniqueName: \"kubernetes.io/projected/86515156-fe01-412d-a10d-16ba26cfb8f8-kube-api-access-dwc7m\") pod \"keystone-operator-controller-manager-64d7687b4f-627k6\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:51 crc kubenswrapper[4703]: I0202 13:07:51.972580 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:52 crc kubenswrapper[4703]: I0202 13:07:52.193993 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6"] Feb 02 13:07:52 crc kubenswrapper[4703]: W0202 13:07:52.198644 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod86515156_fe01_412d_a10d_16ba26cfb8f8.slice/crio-82730dc3c4b95ccf9bc3cc67a59af6d50d32defeddaa016255df49401cb9e69d WatchSource:0}: Error finding container 82730dc3c4b95ccf9bc3cc67a59af6d50d32defeddaa016255df49401cb9e69d: Status 404 returned error can't find the container with id 82730dc3c4b95ccf9bc3cc67a59af6d50d32defeddaa016255df49401cb9e69d Feb 02 13:07:52 crc kubenswrapper[4703]: I0202 13:07:52.324982 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" event={"ID":"86515156-fe01-412d-a10d-16ba26cfb8f8","Type":"ContainerStarted","Data":"82730dc3c4b95ccf9bc3cc67a59af6d50d32defeddaa016255df49401cb9e69d"} Feb 02 13:07:56 crc kubenswrapper[4703]: I0202 13:07:56.352818 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" event={"ID":"86515156-fe01-412d-a10d-16ba26cfb8f8","Type":"ContainerStarted","Data":"72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158"} Feb 02 13:07:56 crc kubenswrapper[4703]: I0202 13:07:56.353410 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:07:56 crc kubenswrapper[4703]: I0202 13:07:56.371907 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" podStartSLOduration=2.05317953 podStartE2EDuration="5.37188011s" podCreationTimestamp="2026-02-02 13:07:51 +0000 UTC" firstStartedPulling="2026-02-02 13:07:52.201390585 +0000 UTC m=+999.216598119" lastFinishedPulling="2026-02-02 13:07:55.520091155 +0000 UTC m=+1002.535298699" observedRunningTime="2026-02-02 13:07:56.370014297 +0000 UTC m=+1003.385221841" watchObservedRunningTime="2026-02-02 13:07:56.37188011 +0000 UTC m=+1003.387087644" Feb 02 13:08:01 crc kubenswrapper[4703]: I0202 13:08:01.979082 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.691646 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-db-create-pqgbn"] Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.692926 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.700922 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l"] Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.702025 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.704601 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-db-secret" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.706821 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pqgbn"] Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.711490 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l"] Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.831540 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6wm5p\" (UniqueName: \"kubernetes.io/projected/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-kube-api-access-6wm5p\") pod \"keystone-8b43-account-create-update-8dr7l\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.831612 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-zmczb\" (UniqueName: \"kubernetes.io/projected/aa79b809-2747-49db-bc5f-995a0b69ac29-kube-api-access-zmczb\") pod \"keystone-db-create-pqgbn\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.831683 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa79b809-2747-49db-bc5f-995a0b69ac29-operator-scripts\") pod \"keystone-db-create-pqgbn\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.831721 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-operator-scripts\") pod \"keystone-8b43-account-create-update-8dr7l\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.933322 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa79b809-2747-49db-bc5f-995a0b69ac29-operator-scripts\") pod \"keystone-db-create-pqgbn\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.933379 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-operator-scripts\") pod \"keystone-8b43-account-create-update-8dr7l\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.933435 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6wm5p\" (UniqueName: \"kubernetes.io/projected/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-kube-api-access-6wm5p\") pod \"keystone-8b43-account-create-update-8dr7l\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " 
pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.933459 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-zmczb\" (UniqueName: \"kubernetes.io/projected/aa79b809-2747-49db-bc5f-995a0b69ac29-kube-api-access-zmczb\") pod \"keystone-db-create-pqgbn\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.934555 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa79b809-2747-49db-bc5f-995a0b69ac29-operator-scripts\") pod \"keystone-db-create-pqgbn\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.935442 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-operator-scripts\") pod \"keystone-8b43-account-create-update-8dr7l\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.954586 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-zmczb\" (UniqueName: \"kubernetes.io/projected/aa79b809-2747-49db-bc5f-995a0b69ac29-kube-api-access-zmczb\") pod \"keystone-db-create-pqgbn\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.954686 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6wm5p\" (UniqueName: \"kubernetes.io/projected/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-kube-api-access-6wm5p\") pod \"keystone-8b43-account-create-update-8dr7l\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.961802 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/ceph"] Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.962528 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/ceph" Feb 02 13:08:05 crc kubenswrapper[4703]: I0202 13:08:05.964717 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"default-dockercfg-htrbj" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.011065 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.024881 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.136461 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-log\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.136790 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-46ngm\" (UniqueName: \"kubernetes.io/projected/4460b73f-1e46-424f-896a-64e152c5976c-kube-api-access-46ngm\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.136903 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-data\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.136930 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-run\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.238429 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-data\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.238505 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-run\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.238575 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-log\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.238619 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-46ngm\" (UniqueName: \"kubernetes.io/projected/4460b73f-1e46-424f-896a-64e152c5976c-kube-api-access-46ngm\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.239478 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-run\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.239511 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-data\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: 
I0202 13:08:06.239579 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-log\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.267267 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-46ngm\" (UniqueName: \"kubernetes.io/projected/4460b73f-1e46-424f-896a-64e152c5976c-kube-api-access-46ngm\") pod \"ceph\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.311665 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/ceph" Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.415837 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"4460b73f-1e46-424f-896a-64e152c5976c","Type":"ContainerStarted","Data":"59ffecb5ff42fad05e7c6386311f97cccabb0213aaf62872a30f9917c51845fb"} Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.527041 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l"] Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.533300 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pqgbn"] Feb 02 13:08:06 crc kubenswrapper[4703]: W0202 13:08:06.538656 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podaa79b809_2747_49db_bc5f_995a0b69ac29.slice/crio-926fb93db2c2175f3310e4d2bdf3dcdca76460a8038ff629bf7c3d7367e4c41e WatchSource:0}: Error finding container 926fb93db2c2175f3310e4d2bdf3dcdca76460a8038ff629bf7c3d7367e4c41e: Status 404 returned error can't find the container with id 926fb93db2c2175f3310e4d2bdf3dcdca76460a8038ff629bf7c3d7367e4c41e Feb 02 13:08:06 crc kubenswrapper[4703]: I0202 13:08:06.763219 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:08:07 crc kubenswrapper[4703]: I0202 13:08:07.427510 4703 generic.go:334] "Generic (PLEG): container finished" podID="ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" containerID="a7cf33914bb0d202b3380fff8479205963f561a34fcce476683b7bd4620ec953" exitCode=0 Feb 02 13:08:07 crc kubenswrapper[4703]: I0202 13:08:07.427649 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" event={"ID":"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb","Type":"ContainerDied","Data":"a7cf33914bb0d202b3380fff8479205963f561a34fcce476683b7bd4620ec953"} Feb 02 13:08:07 crc kubenswrapper[4703]: I0202 13:08:07.427878 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" event={"ID":"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb","Type":"ContainerStarted","Data":"ef218e0210b8e45b78f826d267c9cceefd5bf1d6c9fbf21475a206a40c440f7b"} Feb 02 13:08:07 crc kubenswrapper[4703]: I0202 13:08:07.429559 4703 generic.go:334] "Generic (PLEG): container finished" podID="aa79b809-2747-49db-bc5f-995a0b69ac29" containerID="74d87d84d3fac3cca2a111e73b93efd31c06226dd3c7e9a936fbf01e79dde69e" exitCode=0 Feb 02 13:08:07 crc kubenswrapper[4703]: I0202 13:08:07.429631 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-create-pqgbn" 
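The "Generic (PLEG)" lines above come from the pod lifecycle event generator: it periodically relists container state from the runtime and converts transitions into ContainerStarted/ContainerDied events for the sync loop. A rough sketch of that diffing step (assumed types; the real PLEG also tracks sandboxes and more states):

```go
// Rough sketch (assumed types): derive ContainerStarted/ContainerDied
// events by diffing two relist snapshots of container state, as the
// "Generic (PLEG): container finished" machinery does.
package main

import "fmt"

type state string

const (
	running state = "running"
	exited  state = "exited"
)

type event struct {
	containerID, kind string
}

func diff(prev, cur map[string]state) []event {
	var events []event
	for id, s := range cur {
		switch {
		case s == running && prev[id] != running:
			events = append(events, event{id, "ContainerStarted"})
		case s == exited && prev[id] == running:
			events = append(events, event{id, "ContainerDied"})
		}
	}
	return events
}

func main() {
	prev := map[string]state{"a7cf33914bb0": running}
	cur := map[string]state{"a7cf33914bb0": exited, "ef218e0210b8": running}
	for _, e := range diff(prev, cur) { // map order is not deterministic
		fmt.Printf("%s %s\n", e.kind, e.containerID)
	}
}
```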
event={"ID":"aa79b809-2747-49db-bc5f-995a0b69ac29","Type":"ContainerDied","Data":"74d87d84d3fac3cca2a111e73b93efd31c06226dd3c7e9a936fbf01e79dde69e"} Feb 02 13:08:07 crc kubenswrapper[4703]: I0202 13:08:07.429724 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-create-pqgbn" event={"ID":"aa79b809-2747-49db-bc5f-995a0b69ac29","Type":"ContainerStarted","Data":"926fb93db2c2175f3310e4d2bdf3dcdca76460a8038ff629bf7c3d7367e4c41e"} Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.795300 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.844167 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.881986 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa79b809-2747-49db-bc5f-995a0b69ac29-operator-scripts\") pod \"aa79b809-2747-49db-bc5f-995a0b69ac29\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.882059 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-zmczb\" (UniqueName: \"kubernetes.io/projected/aa79b809-2747-49db-bc5f-995a0b69ac29-kube-api-access-zmczb\") pod \"aa79b809-2747-49db-bc5f-995a0b69ac29\" (UID: \"aa79b809-2747-49db-bc5f-995a0b69ac29\") " Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.883899 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/aa79b809-2747-49db-bc5f-995a0b69ac29-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "aa79b809-2747-49db-bc5f-995a0b69ac29" (UID: "aa79b809-2747-49db-bc5f-995a0b69ac29"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.891402 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/aa79b809-2747-49db-bc5f-995a0b69ac29-kube-api-access-zmczb" (OuterVolumeSpecName: "kube-api-access-zmczb") pod "aa79b809-2747-49db-bc5f-995a0b69ac29" (UID: "aa79b809-2747-49db-bc5f-995a0b69ac29"). InnerVolumeSpecName "kube-api-access-zmczb". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.983352 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-operator-scripts\") pod \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.983418 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6wm5p\" (UniqueName: \"kubernetes.io/projected/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-kube-api-access-6wm5p\") pod \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\" (UID: \"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb\") " Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.983736 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/aa79b809-2747-49db-bc5f-995a0b69ac29-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.983753 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-zmczb\" (UniqueName: \"kubernetes.io/projected/aa79b809-2747-49db-bc5f-995a0b69ac29-kube-api-access-zmczb\") on node \"crc\" DevicePath \"\"" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.983771 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" (UID: "ea4ff59a-3aba-4e33-9be4-be34ed09f4cb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:08:08 crc kubenswrapper[4703]: I0202 13:08:08.987415 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-kube-api-access-6wm5p" (OuterVolumeSpecName: "kube-api-access-6wm5p") pod "ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" (UID: "ea4ff59a-3aba-4e33-9be4-be34ed09f4cb"). InnerVolumeSpecName "kube-api-access-6wm5p". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.084929 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.084969 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6wm5p\" (UniqueName: \"kubernetes.io/projected/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb-kube-api-access-6wm5p\") on node \"crc\" DevicePath \"\"" Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.443984 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-create-pqgbn" event={"ID":"aa79b809-2747-49db-bc5f-995a0b69ac29","Type":"ContainerDied","Data":"926fb93db2c2175f3310e4d2bdf3dcdca76460a8038ff629bf7c3d7367e4c41e"} Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.444295 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="926fb93db2c2175f3310e4d2bdf3dcdca76460a8038ff629bf7c3d7367e4c41e" Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.444022 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-db-create-pqgbn" Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.446123 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" event={"ID":"ea4ff59a-3aba-4e33-9be4-be34ed09f4cb","Type":"ContainerDied","Data":"ef218e0210b8e45b78f826d267c9cceefd5bf1d6c9fbf21475a206a40c440f7b"} Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.446155 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l" Feb 02 13:08:09 crc kubenswrapper[4703]: I0202 13:08:09.446166 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ef218e0210b8e45b78f826d267c9cceefd5bf1d6c9fbf21475a206a40c440f7b" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.437786 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-db-sync-6sl92"] Feb 02 13:08:11 crc kubenswrapper[4703]: E0202 13:08:11.438065 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="aa79b809-2747-49db-bc5f-995a0b69ac29" containerName="mariadb-database-create" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.438082 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="aa79b809-2747-49db-bc5f-995a0b69ac29" containerName="mariadb-database-create" Feb 02 13:08:11 crc kubenswrapper[4703]: E0202 13:08:11.438089 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" containerName="mariadb-account-create-update" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.438095 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" containerName="mariadb-account-create-update" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.438221 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" containerName="mariadb-account-create-update" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.438234 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="aa79b809-2747-49db-bc5f-995a0b69ac29" containerName="mariadb-database-create" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.438632 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.441211 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.441534 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-scripts" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.441962 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-config-data" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.442244 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-keystone-dockercfg-pgz66" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.455569 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-6sl92"] Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.517621 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77c94813-39fb-452c-b9f6-51a00bbe7e4e-config-data\") pod \"keystone-db-sync-6sl92\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.517806 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-8ms2t\" (UniqueName: \"kubernetes.io/projected/77c94813-39fb-452c-b9f6-51a00bbe7e4e-kube-api-access-8ms2t\") pod \"keystone-db-sync-6sl92\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.618879 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-8ms2t\" (UniqueName: \"kubernetes.io/projected/77c94813-39fb-452c-b9f6-51a00bbe7e4e-kube-api-access-8ms2t\") pod \"keystone-db-sync-6sl92\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.618940 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77c94813-39fb-452c-b9f6-51a00bbe7e4e-config-data\") pod \"keystone-db-sync-6sl92\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.635901 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77c94813-39fb-452c-b9f6-51a00bbe7e4e-config-data\") pod \"keystone-db-sync-6sl92\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.635924 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-8ms2t\" (UniqueName: \"kubernetes.io/projected/77c94813-39fb-452c-b9f6-51a00bbe7e4e-kube-api-access-8ms2t\") pod \"keystone-db-sync-6sl92\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:11 crc kubenswrapper[4703]: I0202 13:08:11.762727 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:12 crc kubenswrapper[4703]: I0202 13:08:12.243140 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-6sl92"] Feb 02 13:08:12 crc kubenswrapper[4703]: I0202 13:08:12.476545 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-6sl92" event={"ID":"77c94813-39fb-452c-b9f6-51a00bbe7e4e","Type":"ContainerStarted","Data":"afffa31ec37e9f60dafdfdfc3af8cc1a766cf04e3d3acd00cf193c1a907bf365"} Feb 02 13:08:15 crc kubenswrapper[4703]: I0202 13:08:15.984679 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:08:15 crc kubenswrapper[4703]: I0202 13:08:15.985478 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:08:37 crc kubenswrapper[4703]: E0202 13:08:37.757110 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/podified-antelope-centos9/openstack-keystone:current-podified" Feb 02 13:08:37 crc kubenswrapper[4703]: E0202 13:08:37.757857 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:keystone-db-sync,Image:quay.io/podified-antelope-centos9/openstack-keystone:current-podified,Command:[/bin/bash],Args:[-c keystone-manage db_sync],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:KOLLA_BOOTSTRAP,Value:true,ValueFrom:nil,},EnvVar{Name:KOLLA_CONFIG_STRATEGY,Value:COPY_ALWAYS,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/keystone/keystone.conf,SubPath:keystone.conf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:config-data,ReadOnly:true,MountPath:/etc/my.cnf,SubPath:my.cnf,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-8ms2t,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:&SecurityContext{Capabilities:&Capabilities{Add:[],Drop:[MKNOD],},Privileged:nil,SELinuxOptions:nil,RunAsUser:*42425,RunAsNonRoot:*true,ReadOnlyRootFilesystem:nil,AllowPrivilegeEscalation:nil,RunAsGroup:*42425,ProcMount:nil,WindowsOptions:nil,SeccompProfile:nil,AppArmorProfile:nil,},Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod keystone-db-sync-6sl92_manila-kuttl-tests(77c94813-39fb-452c-b9f6-51a00bbe7e4e): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 13:08:37 crc kubenswrapper[4703]: E0202 13:08:37.759106 4703 
pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="manila-kuttl-tests/keystone-db-sync-6sl92" podUID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" Feb 02 13:08:38 crc kubenswrapper[4703]: E0202 13:08:38.288713 4703 log.go:32] "PullImage from image service failed" err="rpc error: code = Canceled desc = copying config: context canceled" image="quay.io/ceph/demo:latest-squid" Feb 02 13:08:38 crc kubenswrapper[4703]: E0202 13:08:38.288848 4703 kuberuntime_manager.go:1274] "Unhandled Error" err="container &Container{Name:ceph,Image:quay.io/ceph/demo:latest-squid,Command:[],Args:[],WorkingDir:,Ports:[]ContainerPort{},Env:[]EnvVar{EnvVar{Name:MON_IP,Value:192.168.126.11,ValueFrom:nil,},EnvVar{Name:CEPH_DAEMON,Value:demo,ValueFrom:nil,},EnvVar{Name:CEPH_PUBLIC_NETWORK,Value:0.0.0.0/0,ValueFrom:nil,},EnvVar{Name:DEMO_DAEMONS,Value:osd,mds,rgw,ValueFrom:nil,},EnvVar{Name:CEPH_DEMO_UID,Value:0,ValueFrom:nil,},EnvVar{Name:RGW_NAME,Value:ceph,ValueFrom:nil,},},Resources:ResourceRequirements{Limits:ResourceList{},Requests:ResourceList{},Claims:[]ResourceClaim{},},VolumeMounts:[]VolumeMount{VolumeMount{Name:data,ReadOnly:false,MountPath:/var/lib/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:log,ReadOnly:false,MountPath:/var/log/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:run,ReadOnly:false,MountPath:/run/ceph,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},VolumeMount{Name:kube-api-access-46ngm,ReadOnly:true,MountPath:/var/run/secrets/kubernetes.io/serviceaccount,SubPath:,MountPropagation:nil,SubPathExpr:,RecursiveReadOnly:nil,},},LivenessProbe:nil,ReadinessProbe:nil,Lifecycle:nil,TerminationMessagePath:/dev/termination-log,ImagePullPolicy:IfNotPresent,SecurityContext:nil,Stdin:false,StdinOnce:false,TTY:false,EnvFrom:[]EnvFromSource{},TerminationMessagePolicy:File,VolumeDevices:[]VolumeDevice{},StartupProbe:nil,ResizePolicy:[]ContainerResizePolicy{},RestartPolicy:nil,} start failed in pod ceph_manila-kuttl-tests(4460b73f-1e46-424f-896a-64e152c5976c): ErrImagePull: rpc error: code = Canceled desc = copying config: context canceled" logger="UnhandledError" Feb 02 13:08:38 crc kubenswrapper[4703]: E0202 13:08:38.293324 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceph\" with ErrImagePull: \"rpc error: code = Canceled desc = copying config: context canceled\"" pod="manila-kuttl-tests/ceph" podUID="4460b73f-1e46-424f-896a-64e152c5976c" Feb 02 13:08:38 crc kubenswrapper[4703]: E0202 13:08:38.683977 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"ceph\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/ceph/demo:latest-squid\\\"\"" pod="manila-kuttl-tests/ceph" podUID="4460b73f-1e46-424f-896a-64e152c5976c" Feb 02 13:08:38 crc kubenswrapper[4703]: E0202 13:08:38.684165 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"keystone-db-sync\" with ImagePullBackOff: \"Back-off pulling image \\\"quay.io/podified-antelope-centos9/openstack-keystone:current-podified\\\"\"" pod="manila-kuttl-tests/keystone-db-sync-6sl92" podUID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" Feb 02 13:08:45 crc kubenswrapper[4703]: I0202 13:08:45.984951 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs 
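After the two ErrImagePull failures above, the pod workers fall back to ImagePullBackOff, and the pulls are retried with capped exponential backoff; the retries eventually succeed at 13:08:52 below. The 10s initial delay and 5m cap in this sketch are assumed common kubelet defaults, not values read from this log:

```go
// Sketch of capped exponential backoff of the kind behind the
// ImagePullBackOff entries above. The 10s initial / 5m cap values are an
// assumption about common kubelet defaults, not read from this log.
package main

import (
	"fmt"
	"time"
)

func backoffSchedule(initial, max time.Duration, attempts int) []time.Duration {
	delays := make([]time.Duration, 0, attempts)
	d := initial
	for i := 0; i < attempts; i++ {
		delays = append(delays, d)
		d *= 2
		if d > max {
			d = max
		}
	}
	return delays
}

func main() {
	// Wait 10s, 20s, 40s, ... between pull retries, never more than 5m.
	fmt.Println(backoffSchedule(10*time.Second, 5*time.Minute, 7))
	// [10s 20s 40s 1m20s 2m40s 5m0s 5m0s]
}
```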
Feb 02 13:08:45 crc kubenswrapper[4703]: I0202 13:08:45.984951 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 13:08:45 crc kubenswrapper[4703]: I0202 13:08:45.985646 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 13:08:45 crc kubenswrapper[4703]: I0202 13:08:45.985703 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs"
Feb 02 13:08:45 crc kubenswrapper[4703]: I0202 13:08:45.986397 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"f45a976ef2978692f05e47b584c70f23b0b16afa947c9943af0093366493b355"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 13:08:45 crc kubenswrapper[4703]: I0202 13:08:45.986476 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://f45a976ef2978692f05e47b584c70f23b0b16afa947c9943af0093366493b355" gracePeriod=600
Feb 02 13:08:46 crc kubenswrapper[4703]: I0202 13:08:46.747858 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="f45a976ef2978692f05e47b584c70f23b0b16afa947c9943af0093366493b355" exitCode=0
Feb 02 13:08:46 crc kubenswrapper[4703]: I0202 13:08:46.747940 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"f45a976ef2978692f05e47b584c70f23b0b16afa947c9943af0093366493b355"}
Feb 02 13:08:46 crc kubenswrapper[4703]: I0202 13:08:46.748431 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"75cfda81cf632883297446295adc654e01a62e4b398c4d8ee1de01c8cbb3f5e2"}
Feb 02 13:08:46 crc kubenswrapper[4703]: I0202 13:08:46.748457 4703 scope.go:117] "RemoveContainer" containerID="7989a84887b494a315910cd56de11e22a0e359c557f3cdff1b1ca6e34517f165"
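The machine-config-daemon episode above is the full liveness path: repeated probe failures mark the container unhealthy, kubelet kills f45a976e... with the pod's termination grace period (gracePeriod=600) and starts replacement 75cfda81..., then garbage-collects the previous instance via RemoveContainer. A toy sketch of the consecutive-failure counting that drives the restart decision (failureThreshold=3 is the Kubernetes default, assumed rather than read from this pod's spec):

```go
// Toy sketch (assumed probe plumbing, not kubelet source): count
// consecutive liveness failures up to a threshold before requesting a
// restart, the behavior behind "Container machine-config-daemon failed
// liveness probe, will be restarted" above.
package main

import "fmt"

type prober struct {
	failureThreshold int
	consecutiveFails int
}

// observe feeds one probe result; it reports true when the container
// should be killed and restarted.
func (p *prober) observe(healthy bool) (restart bool) {
	if healthy {
		p.consecutiveFails = 0
		return false
	}
	p.consecutiveFails++
	return p.consecutiveFails >= p.failureThreshold
}

func main() {
	p := &prober{failureThreshold: 3} // Kubernetes default, assumed here
	for i, healthy := range []bool{false, false, false} {
		if p.observe(healthy) {
			fmt.Printf("probe %d: unhealthy, killing container with a grace period\n", i+1)
		} else {
			fmt.Printf("probe %d: failure recorded (%d/%d)\n", i+1, p.consecutiveFails, p.failureThreshold)
		}
	}
}
```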
crc kubenswrapper[4703]: I0202 13:08:52.816377 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-db-sync-6sl92" podStartSLOduration=1.490849341 podStartE2EDuration="41.816361567s" podCreationTimestamp="2026-02-02 13:08:11 +0000 UTC" firstStartedPulling="2026-02-02 13:08:12.252661706 +0000 UTC m=+1019.267869240" lastFinishedPulling="2026-02-02 13:08:52.578173932 +0000 UTC m=+1059.593381466" observedRunningTime="2026-02-02 13:08:52.810535411 +0000 UTC m=+1059.825742985" watchObservedRunningTime="2026-02-02 13:08:52.816361567 +0000 UTC m=+1059.831569101" Feb 02 13:08:52 crc kubenswrapper[4703]: I0202 13:08:52.829824 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/ceph" podStartSLOduration=1.8278493 podStartE2EDuration="47.82980355s" podCreationTimestamp="2026-02-02 13:08:05 +0000 UTC" firstStartedPulling="2026-02-02 13:08:06.347503526 +0000 UTC m=+1013.362711060" lastFinishedPulling="2026-02-02 13:08:52.349457776 +0000 UTC m=+1059.364665310" observedRunningTime="2026-02-02 13:08:52.82488409 +0000 UTC m=+1059.840091634" watchObservedRunningTime="2026-02-02 13:08:52.82980355 +0000 UTC m=+1059.845011084" Feb 02 13:08:57 crc kubenswrapper[4703]: I0202 13:08:57.831204 4703 generic.go:334] "Generic (PLEG): container finished" podID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" containerID="161766a10b1b949b3779045299836d9d4d2dfef5d890922e220b2fdcd4628ac8" exitCode=0 Feb 02 13:08:57 crc kubenswrapper[4703]: I0202 13:08:57.831322 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-6sl92" event={"ID":"77c94813-39fb-452c-b9f6-51a00bbe7e4e","Type":"ContainerDied","Data":"161766a10b1b949b3779045299836d9d4d2dfef5d890922e220b2fdcd4628ac8"} Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.134000 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.302007 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-8ms2t\" (UniqueName: \"kubernetes.io/projected/77c94813-39fb-452c-b9f6-51a00bbe7e4e-kube-api-access-8ms2t\") pod \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.302089 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77c94813-39fb-452c-b9f6-51a00bbe7e4e-config-data\") pod \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\" (UID: \"77c94813-39fb-452c-b9f6-51a00bbe7e4e\") " Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.313165 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/77c94813-39fb-452c-b9f6-51a00bbe7e4e-kube-api-access-8ms2t" (OuterVolumeSpecName: "kube-api-access-8ms2t") pod "77c94813-39fb-452c-b9f6-51a00bbe7e4e" (UID: "77c94813-39fb-452c-b9f6-51a00bbe7e4e"). InnerVolumeSpecName "kube-api-access-8ms2t". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.355980 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/77c94813-39fb-452c-b9f6-51a00bbe7e4e-config-data" (OuterVolumeSpecName: "config-data") pod "77c94813-39fb-452c-b9f6-51a00bbe7e4e" (UID: "77c94813-39fb-452c-b9f6-51a00bbe7e4e"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.403895 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-8ms2t\" (UniqueName: \"kubernetes.io/projected/77c94813-39fb-452c-b9f6-51a00bbe7e4e-kube-api-access-8ms2t\") on node \"crc\" DevicePath \"\"" Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.403952 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/77c94813-39fb-452c-b9f6-51a00bbe7e4e-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.865999 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-db-sync-6sl92" event={"ID":"77c94813-39fb-452c-b9f6-51a00bbe7e4e","Type":"ContainerDied","Data":"afffa31ec37e9f60dafdfdfc3af8cc1a766cf04e3d3acd00cf193c1a907bf365"} Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.866701 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="afffa31ec37e9f60dafdfdfc3af8cc1a766cf04e3d3acd00cf193c1a907bf365" Feb 02 13:08:59 crc kubenswrapper[4703]: I0202 13:08:59.866024 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-db-sync-6sl92" Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.096721 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-t26td"] Feb 02 13:09:00 crc kubenswrapper[4703]: E0202 13:09:00.097072 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" containerName="keystone-db-sync" Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.097089 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" containerName="keystone-db-sync" Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.097246 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" containerName="keystone-db-sync" Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.097752 4703 util.go:30] "No sandbox for pod can be found. 
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.105160 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-scripts"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.105173 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"osp-secret"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.105236 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-config-data"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.105403 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-keystone-dockercfg-pgz66"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.105451 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.119050 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-t26td"]
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.215966 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-config-data\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.216040 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-credential-keys\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.216082 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-scripts\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.216183 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-g6hjd\" (UniqueName: \"kubernetes.io/projected/c81d425d-d6cf-4521-a429-8febc3442348-kube-api-access-g6hjd\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.216244 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-fernet-keys\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.317754 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-scripts\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.317842 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-g6hjd\" (UniqueName: \"kubernetes.io/projected/c81d425d-d6cf-4521-a429-8febc3442348-kube-api-access-g6hjd\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.317864 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-fernet-keys\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.317927 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-config-data\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.317952 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-credential-keys\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.322536 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-scripts\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.322976 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-config-data\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.323055 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-fernet-keys\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.326342 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-credential-keys\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.342669 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-g6hjd\" (UniqueName: \"kubernetes.io/projected/c81d425d-d6cf-4521-a429-8febc3442348-kube-api-access-g6hjd\") pod \"keystone-bootstrap-t26td\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") " pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.425574 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.830814 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-t26td"]
Feb 02 13:09:00 crc kubenswrapper[4703]: I0202 13:09:00.873422 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-t26td" event={"ID":"c81d425d-d6cf-4521-a429-8febc3442348","Type":"ContainerStarted","Data":"7c06e54d0850b1a9770003445ff15ce96f3a77a92818261d63597a4e51bc4e21"}
Feb 02 13:09:01 crc kubenswrapper[4703]: I0202 13:09:01.886194 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-t26td" event={"ID":"c81d425d-d6cf-4521-a429-8febc3442348","Type":"ContainerStarted","Data":"393e4c516e9f2d3d8721685db60f695da8805e271d2b3f27ef4b14b8d1ac29c8"}
Feb 02 13:09:01 crc kubenswrapper[4703]: I0202 13:09:01.906668 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-bootstrap-t26td" podStartSLOduration=1.906649392 podStartE2EDuration="1.906649392s" podCreationTimestamp="2026-02-02 13:09:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:09:01.90165515 +0000 UTC m=+1068.916862694" watchObservedRunningTime="2026-02-02 13:09:01.906649392 +0000 UTC m=+1068.921856926"
Feb 02 13:09:04 crc kubenswrapper[4703]: I0202 13:09:04.912078 4703 generic.go:334] "Generic (PLEG): container finished" podID="c81d425d-d6cf-4521-a429-8febc3442348" containerID="393e4c516e9f2d3d8721685db60f695da8805e271d2b3f27ef4b14b8d1ac29c8" exitCode=0
Feb 02 13:09:04 crc kubenswrapper[4703]: I0202 13:09:04.912191 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-t26td" event={"ID":"c81d425d-d6cf-4521-a429-8febc3442348","Type":"ContainerDied","Data":"393e4c516e9f2d3d8721685db60f695da8805e271d2b3f27ef4b14b8d1ac29c8"}
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.210386 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.404006 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-credential-keys\") pod \"c81d425d-d6cf-4521-a429-8febc3442348\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") "
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.404314 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-g6hjd\" (UniqueName: \"kubernetes.io/projected/c81d425d-d6cf-4521-a429-8febc3442348-kube-api-access-g6hjd\") pod \"c81d425d-d6cf-4521-a429-8febc3442348\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") "
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.404353 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-config-data\") pod \"c81d425d-d6cf-4521-a429-8febc3442348\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") "
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.404417 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-fernet-keys\") pod \"c81d425d-d6cf-4521-a429-8febc3442348\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") "
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.404451 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-scripts\") pod \"c81d425d-d6cf-4521-a429-8febc3442348\" (UID: \"c81d425d-d6cf-4521-a429-8febc3442348\") "
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.409623 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-scripts" (OuterVolumeSpecName: "scripts") pod "c81d425d-d6cf-4521-a429-8febc3442348" (UID: "c81d425d-d6cf-4521-a429-8febc3442348"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.410518 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c81d425d-d6cf-4521-a429-8febc3442348-kube-api-access-g6hjd" (OuterVolumeSpecName: "kube-api-access-g6hjd") pod "c81d425d-d6cf-4521-a429-8febc3442348" (UID: "c81d425d-d6cf-4521-a429-8febc3442348"). InnerVolumeSpecName "kube-api-access-g6hjd". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.411375 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "c81d425d-d6cf-4521-a429-8febc3442348" (UID: "c81d425d-d6cf-4521-a429-8febc3442348"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.411443 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "c81d425d-d6cf-4521-a429-8febc3442348" (UID: "c81d425d-d6cf-4521-a429-8febc3442348"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.429324 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-config-data" (OuterVolumeSpecName: "config-data") pod "c81d425d-d6cf-4521-a429-8febc3442348" (UID: "c81d425d-d6cf-4521-a429-8febc3442348"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.505741 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.505779 4703 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-credential-keys\") on node \"crc\" DevicePath \"\""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.505792 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-g6hjd\" (UniqueName: \"kubernetes.io/projected/c81d425d-d6cf-4521-a429-8febc3442348-kube-api-access-g6hjd\") on node \"crc\" DevicePath \"\""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.505801 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.505809 4703 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/c81d425d-d6cf-4521-a429-8febc3442348-fernet-keys\") on node \"crc\" DevicePath \"\""
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.926512 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-bootstrap-t26td" event={"ID":"c81d425d-d6cf-4521-a429-8febc3442348","Type":"ContainerDied","Data":"7c06e54d0850b1a9770003445ff15ce96f3a77a92818261d63597a4e51bc4e21"}
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.926562 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7c06e54d0850b1a9770003445ff15ce96f3a77a92818261d63597a4e51bc4e21"
Feb 02 13:09:06 crc kubenswrapper[4703]: I0202 13:09:06.926583 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-bootstrap-t26td"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.110017 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone-5999f99c86-s96tg"]
Feb 02 13:09:07 crc kubenswrapper[4703]: E0202 13:09:07.110779 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c81d425d-d6cf-4521-a429-8febc3442348" containerName="keystone-bootstrap"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.110806 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c81d425d-d6cf-4521-a429-8febc3442348" containerName="keystone-bootstrap"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.111265 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="c81d425d-d6cf-4521-a429-8febc3442348" containerName="keystone-bootstrap"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.112212 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.116286 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.117351 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-keystone-dockercfg-pgz66"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.117806 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-scripts"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.122759 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"keystone-config-data"
Feb 02 13:09:07 crc kubenswrapper[4703]: I0202 13:09:07.129755 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-5999f99c86-s96tg"]
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.491142 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-credential-keys\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.491219 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-fernet-keys\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.491262 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-config-data\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.491360 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-mf6kk\" (UniqueName: \"kubernetes.io/projected/199d616b-b578-48ae-aad7-e4e2ed6f3d31-kube-api-access-mf6kk\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.492081 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-scripts\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.492201 4703 fsHandler.go:133] fs: disk usage and inodes count on following dirs took 1.116777453s: [/var/lib/containers/storage/overlay/33582ac4a16a6fe91a41f3ae587d7c1a284f51b64a31025c422b2f8e19b73b7d/diff ]; will not log again for this container unless duration exceeds 2s
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.494487 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openshift-marketplace/community-operators-m8xxx" podUID="1a77f425-fdf2-44a5-8fac-d41dc0307d14" containerName="registry-server" probeResult="failure" output=<
Feb 02 13:09:08 crc kubenswrapper[4703]: timeout: failed to connect service ":50051" within 1s
Feb 02 13:09:08 crc kubenswrapper[4703]: >
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.494536 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-marketplace/community-operators-m8xxx" podUID="1a77f425-fdf2-44a5-8fac-d41dc0307d14" containerName="registry-server" probeResult="failure" output=<
Feb 02 13:09:08 crc kubenswrapper[4703]: timeout: failed to connect service ":50051" within 1s
Feb 02 13:09:08 crc kubenswrapper[4703]: >
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.593964 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-fernet-keys\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.594588 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-config-data\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.594752 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-mf6kk\" (UniqueName: \"kubernetes.io/projected/199d616b-b578-48ae-aad7-e4e2ed6f3d31-kube-api-access-mf6kk\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.594817 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-scripts\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.594910 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-credential-keys\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.598705 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-fernet-keys\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.599374 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-config-data\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.603149 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-credential-keys\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.604676 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-scripts\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.620475 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-mf6kk\" (UniqueName: \"kubernetes.io/projected/199d616b-b578-48ae-aad7-e4e2ed6f3d31-kube-api-access-mf6kk\") pod \"keystone-5999f99c86-s96tg\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:08 crc kubenswrapper[4703]: I0202 13:09:08.816943 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:09 crc kubenswrapper[4703]: I0202 13:09:09.263376 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone-5999f99c86-s96tg"]
Feb 02 13:09:09 crc kubenswrapper[4703]: W0202 13:09:09.265328 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod199d616b_b578_48ae_aad7_e4e2ed6f3d31.slice/crio-0fd4b74d0f263d25815c2ae237ccb59ba4f1a30bd5aca7687ea5a3f965dc1c67 WatchSource:0}: Error finding container 0fd4b74d0f263d25815c2ae237ccb59ba4f1a30bd5aca7687ea5a3f965dc1c67: Status 404 returned error can't find the container with id 0fd4b74d0f263d25815c2ae237ccb59ba4f1a30bd5aca7687ea5a3f965dc1c67
Feb 02 13:09:09 crc kubenswrapper[4703]: I0202 13:09:09.527031 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" event={"ID":"199d616b-b578-48ae-aad7-e4e2ed6f3d31","Type":"ContainerStarted","Data":"d2b8dbed2bc9f6377b0ce77e910f11799b70b4b86d5a0cc9bf494f36329fa105"}
Feb 02 13:09:09 crc kubenswrapper[4703]: I0202 13:09:09.527389 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
Feb 02 13:09:09 crc kubenswrapper[4703]: I0202 13:09:09.527401 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" event={"ID":"199d616b-b578-48ae-aad7-e4e2ed6f3d31","Type":"ContainerStarted","Data":"0fd4b74d0f263d25815c2ae237ccb59ba4f1a30bd5aca7687ea5a3f965dc1c67"}
Feb 02 13:09:09 crc kubenswrapper[4703]: I0202 13:09:09.549081 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" podStartSLOduration=2.549059031 podStartE2EDuration="2.549059031s" podCreationTimestamp="2026-02-02 13:09:07 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:09:09.54268197 +0000 UTC m=+1076.557889514" watchObservedRunningTime="2026-02-02 13:09:09.549059031 +0000 UTC m=+1076.564266565"
Feb 02 13:09:14 crc kubenswrapper[4703]: E0202 13:09:14.658380 4703 upgradeaware.go:441] Error proxying data from backend to client: writeto tcp 38.102.83.198:55914->38.102.83.198:33405: read tcp 38.102.83.198:55914->38.102.83.198:33405: read: connection reset by peer
Feb 02 13:09:40 crc kubenswrapper[4703]: I0202 13:09:40.267567 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg"
"SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" Feb 02 13:09:50 crc kubenswrapper[4703]: E0202 13:09:50.953292 4703 upgradeaware.go:427] Error proxying data from client to backend: readfrom tcp 38.102.83.198:35914->38.102.83.198:33405: write tcp 38.102.83.198:35914->38.102.83.198:33405: write: connection reset by peer Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.373596 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-index-299cf"] Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.374599 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-index-299cf" Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.377216 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-index-dockercfg-z8kc8" Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.382149 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-index-299cf"] Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.476754 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-676bq\" (UniqueName: \"kubernetes.io/projected/8d0db345-6af9-4931-a733-dd7c6cdc8c54-kube-api-access-676bq\") pod \"manila-operator-index-299cf\" (UID: \"8d0db345-6af9-4931-a733-dd7c6cdc8c54\") " pod="openstack-operators/manila-operator-index-299cf" Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.578669 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-676bq\" (UniqueName: \"kubernetes.io/projected/8d0db345-6af9-4931-a733-dd7c6cdc8c54-kube-api-access-676bq\") pod \"manila-operator-index-299cf\" (UID: \"8d0db345-6af9-4931-a733-dd7c6cdc8c54\") " pod="openstack-operators/manila-operator-index-299cf" Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.603057 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-676bq\" (UniqueName: \"kubernetes.io/projected/8d0db345-6af9-4931-a733-dd7c6cdc8c54-kube-api-access-676bq\") pod \"manila-operator-index-299cf\" (UID: \"8d0db345-6af9-4931-a733-dd7c6cdc8c54\") " pod="openstack-operators/manila-operator-index-299cf" Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.694744 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-index-299cf" Feb 02 13:09:52 crc kubenswrapper[4703]: I0202 13:09:52.900509 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-index-299cf"] Feb 02 13:09:53 crc kubenswrapper[4703]: I0202 13:09:53.864283 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-299cf" event={"ID":"8d0db345-6af9-4931-a733-dd7c6cdc8c54","Type":"ContainerStarted","Data":"7f75ae72d35df3db616ecf7b760ed55ac7c3ac6efa3115f7bf88ab9aa55eeb57"} Feb 02 13:09:55 crc kubenswrapper[4703]: I0202 13:09:55.880825 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-299cf" event={"ID":"8d0db345-6af9-4931-a733-dd7c6cdc8c54","Type":"ContainerStarted","Data":"2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a"} Feb 02 13:09:55 crc kubenswrapper[4703]: I0202 13:09:55.904579 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-index-299cf" podStartSLOduration=1.659544032 podStartE2EDuration="3.904549924s" podCreationTimestamp="2026-02-02 13:09:52 +0000 UTC" firstStartedPulling="2026-02-02 13:09:52.909747169 +0000 UTC m=+1119.924954703" lastFinishedPulling="2026-02-02 13:09:55.154753021 +0000 UTC m=+1122.169960595" observedRunningTime="2026-02-02 13:09:55.899993094 +0000 UTC m=+1122.915200618" watchObservedRunningTime="2026-02-02 13:09:55.904549924 +0000 UTC m=+1122.919757508" Feb 02 13:09:56 crc kubenswrapper[4703]: I0202 13:09:56.571880 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-index-299cf"] Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.173497 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-index-tbn87"] Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.174502 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-index-tbn87" Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.189101 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-index-tbn87"] Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.256554 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9rz4k\" (UniqueName: \"kubernetes.io/projected/3daa09fb-f816-46c0-a43f-312554985188-kube-api-access-9rz4k\") pod \"manila-operator-index-tbn87\" (UID: \"3daa09fb-f816-46c0-a43f-312554985188\") " pod="openstack-operators/manila-operator-index-tbn87" Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.357628 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9rz4k\" (UniqueName: \"kubernetes.io/projected/3daa09fb-f816-46c0-a43f-312554985188-kube-api-access-9rz4k\") pod \"manila-operator-index-tbn87\" (UID: \"3daa09fb-f816-46c0-a43f-312554985188\") " pod="openstack-operators/manila-operator-index-tbn87" Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.376787 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9rz4k\" (UniqueName: \"kubernetes.io/projected/3daa09fb-f816-46c0-a43f-312554985188-kube-api-access-9rz4k\") pod \"manila-operator-index-tbn87\" (UID: \"3daa09fb-f816-46c0-a43f-312554985188\") " pod="openstack-operators/manila-operator-index-tbn87" Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.505686 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-index-tbn87" Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.908700 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/manila-operator-index-299cf" podUID="8d0db345-6af9-4931-a733-dd7c6cdc8c54" containerName="registry-server" containerID="cri-o://2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a" gracePeriod=2 Feb 02 13:09:57 crc kubenswrapper[4703]: I0202 13:09:57.918259 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-index-tbn87"] Feb 02 13:09:57 crc kubenswrapper[4703]: W0202 13:09:57.926358 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3daa09fb_f816_46c0_a43f_312554985188.slice/crio-71cacafb1fc434deb6f2dc44591881a07c67ae31dbccce2c2191797362ec9c61 WatchSource:0}: Error finding container 71cacafb1fc434deb6f2dc44591881a07c67ae31dbccce2c2191797362ec9c61: Status 404 returned error can't find the container with id 71cacafb1fc434deb6f2dc44591881a07c67ae31dbccce2c2191797362ec9c61 Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.349399 4703 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.478555 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-676bq\" (UniqueName: \"kubernetes.io/projected/8d0db345-6af9-4931-a733-dd7c6cdc8c54-kube-api-access-676bq\") pod \"8d0db345-6af9-4931-a733-dd7c6cdc8c54\" (UID: \"8d0db345-6af9-4931-a733-dd7c6cdc8c54\") "
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.485593 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8d0db345-6af9-4931-a733-dd7c6cdc8c54-kube-api-access-676bq" (OuterVolumeSpecName: "kube-api-access-676bq") pod "8d0db345-6af9-4931-a733-dd7c6cdc8c54" (UID: "8d0db345-6af9-4931-a733-dd7c6cdc8c54"). InnerVolumeSpecName "kube-api-access-676bq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.579855 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-676bq\" (UniqueName: \"kubernetes.io/projected/8d0db345-6af9-4931-a733-dd7c6cdc8c54-kube-api-access-676bq\") on node \"crc\" DevicePath \"\""
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.918447 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-tbn87" event={"ID":"3daa09fb-f816-46c0-a43f-312554985188","Type":"ContainerStarted","Data":"60ea15301152b9f689dd5d9ce73c78ada802ad99c4bae753a875e04975e2360a"}
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.918512 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-tbn87" event={"ID":"3daa09fb-f816-46c0-a43f-312554985188","Type":"ContainerStarted","Data":"71cacafb1fc434deb6f2dc44591881a07c67ae31dbccce2c2191797362ec9c61"}
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.920704 4703 generic.go:334] "Generic (PLEG): container finished" podID="8d0db345-6af9-4931-a733-dd7c6cdc8c54" containerID="2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a" exitCode=0
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.920750 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-299cf" event={"ID":"8d0db345-6af9-4931-a733-dd7c6cdc8c54","Type":"ContainerDied","Data":"2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a"}
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.920774 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-299cf" event={"ID":"8d0db345-6af9-4931-a733-dd7c6cdc8c54","Type":"ContainerDied","Data":"7f75ae72d35df3db616ecf7b760ed55ac7c3ac6efa3115f7bf88ab9aa55eeb57"}
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.920796 4703 scope.go:117] "RemoveContainer" containerID="2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a"
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.920896 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-index-299cf"
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.938428 4703 scope.go:117] "RemoveContainer" containerID="2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a"
Feb 02 13:09:58 crc kubenswrapper[4703]: E0202 13:09:58.939053 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a\": container with ID starting with 2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a not found: ID does not exist" containerID="2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a"
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.939096 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a"} err="failed to get container status \"2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a\": rpc error: code = NotFound desc = could not find container \"2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a\": container with ID starting with 2e65d016d4b233a5f162f11fe49bdeb7a87605a073477347e8e2a1adc776372a not found: ID does not exist"
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.940215 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-index-tbn87" podStartSLOduration=1.626019458 podStartE2EDuration="1.940198553s" podCreationTimestamp="2026-02-02 13:09:57 +0000 UTC" firstStartedPulling="2026-02-02 13:09:57.929774042 +0000 UTC m=+1124.944981576" lastFinishedPulling="2026-02-02 13:09:58.243953137 +0000 UTC m=+1125.259160671" observedRunningTime="2026-02-02 13:09:58.933161193 +0000 UTC m=+1125.948368757" watchObservedRunningTime="2026-02-02 13:09:58.940198553 +0000 UTC m=+1125.955406097"
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.960326 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-index-299cf"]
Feb 02 13:09:58 crc kubenswrapper[4703]: I0202 13:09:58.965940 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/manila-operator-index-299cf"]
Feb 02 13:09:59 crc kubenswrapper[4703]: I0202 13:09:59.942240 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8d0db345-6af9-4931-a733-dd7c6cdc8c54" path="/var/lib/kubelet/pods/8d0db345-6af9-4931-a733-dd7c6cdc8c54/volumes"
Feb 02 13:10:07 crc kubenswrapper[4703]: I0202 13:10:07.507008 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openstack-operators/manila-operator-index-tbn87"
Feb 02 13:10:07 crc kubenswrapper[4703]: I0202 13:10:07.507453 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-index-tbn87"
Feb 02 13:10:07 crc kubenswrapper[4703]: I0202 13:10:07.540988 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openstack-operators/manila-operator-index-tbn87"
Feb 02 13:10:08 crc kubenswrapper[4703]: I0202 13:10:08.037069 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-index-tbn87"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.809545 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"]
Feb 02 13:10:14 crc kubenswrapper[4703]: E0202 13:10:14.810663 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8d0db345-6af9-4931-a733-dd7c6cdc8c54" containerName="registry-server"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.810684 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="8d0db345-6af9-4931-a733-dd7c6cdc8c54" containerName="registry-server"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.811076 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="8d0db345-6af9-4931-a733-dd7c6cdc8c54" containerName="registry-server"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.814337 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.817938 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"default-dockercfg-t9m2k"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.841818 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"]
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.873935 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sjkds\" (UniqueName: \"kubernetes.io/projected/b77f27da-6dec-4980-87d9-7e31b34e413d-kube-api-access-sjkds\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.874072 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-bundle\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.874114 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-util\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.975302 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sjkds\" (UniqueName: \"kubernetes.io/projected/b77f27da-6dec-4980-87d9-7e31b34e413d-kube-api-access-sjkds\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.975381 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-bundle\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.975424 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-util\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.976127 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-util\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.977046 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-bundle\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:14 crc kubenswrapper[4703]: I0202 13:10:14.995354 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sjkds\" (UniqueName: \"kubernetes.io/projected/b77f27da-6dec-4980-87d9-7e31b34e413d-kube-api-access-sjkds\") pod \"6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") " pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:15 crc kubenswrapper[4703]: I0202 13:10:15.132095 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:15 crc kubenswrapper[4703]: I0202 13:10:15.584883 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"]
Feb 02 13:10:16 crc kubenswrapper[4703]: I0202 13:10:16.055798 4703 generic.go:334] "Generic (PLEG): container finished" podID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerID="19012193497d1a378e86e5ef71271fc57bc9283da76bd10b8973562fdc70f5a9" exitCode=0
Feb 02 13:10:16 crc kubenswrapper[4703]: I0202 13:10:16.055849 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s" event={"ID":"b77f27da-6dec-4980-87d9-7e31b34e413d","Type":"ContainerDied","Data":"19012193497d1a378e86e5ef71271fc57bc9283da76bd10b8973562fdc70f5a9"}
Feb 02 13:10:16 crc kubenswrapper[4703]: I0202 13:10:16.055879 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s" event={"ID":"b77f27da-6dec-4980-87d9-7e31b34e413d","Type":"ContainerStarted","Data":"ab194fc08338f1228f0d354a5f2bb5938c3d7690f391c08f9a914488cc487033"}
Feb 02 13:10:17 crc kubenswrapper[4703]: I0202 13:10:17.063208 4703 generic.go:334] "Generic (PLEG): container finished" podID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerID="d0fad469510bf40a0243e72c3f70291266e246d08df711d6a046f44cd914c7ca" exitCode=0
Feb 02 13:10:17 crc kubenswrapper[4703]: I0202 13:10:17.063250 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s" event={"ID":"b77f27da-6dec-4980-87d9-7e31b34e413d","Type":"ContainerDied","Data":"d0fad469510bf40a0243e72c3f70291266e246d08df711d6a046f44cd914c7ca"}
Feb 02 13:10:18 crc kubenswrapper[4703]: I0202 13:10:18.076870 4703 generic.go:334] "Generic (PLEG): container finished" podID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerID="66e0f2dfb7fd00f8b497a45aa0a06bbe40c47ead365098938dea9649010b885e" exitCode=0
Feb 02 13:10:18 crc kubenswrapper[4703]: I0202 13:10:18.076997 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s" event={"ID":"b77f27da-6dec-4980-87d9-7e31b34e413d","Type":"ContainerDied","Data":"66e0f2dfb7fd00f8b497a45aa0a06bbe40c47ead365098938dea9649010b885e"}
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.424647 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.548987 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sjkds\" (UniqueName: \"kubernetes.io/projected/b77f27da-6dec-4980-87d9-7e31b34e413d-kube-api-access-sjkds\") pod \"b77f27da-6dec-4980-87d9-7e31b34e413d\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") "
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.549149 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-util\") pod \"b77f27da-6dec-4980-87d9-7e31b34e413d\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") "
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.549178 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-bundle\") pod \"b77f27da-6dec-4980-87d9-7e31b34e413d\" (UID: \"b77f27da-6dec-4980-87d9-7e31b34e413d\") "
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.550611 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-bundle" (OuterVolumeSpecName: "bundle") pod "b77f27da-6dec-4980-87d9-7e31b34e413d" (UID: "b77f27da-6dec-4980-87d9-7e31b34e413d"). InnerVolumeSpecName "bundle". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.559586 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b77f27da-6dec-4980-87d9-7e31b34e413d-kube-api-access-sjkds" (OuterVolumeSpecName: "kube-api-access-sjkds") pod "b77f27da-6dec-4980-87d9-7e31b34e413d" (UID: "b77f27da-6dec-4980-87d9-7e31b34e413d"). InnerVolumeSpecName "kube-api-access-sjkds". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.565096 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-util" (OuterVolumeSpecName: "util") pod "b77f27da-6dec-4980-87d9-7e31b34e413d" (UID: "b77f27da-6dec-4980-87d9-7e31b34e413d"). InnerVolumeSpecName "util". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.650954 4703 reconciler_common.go:293] "Volume detached for volume \"util\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-util\") on node \"crc\" DevicePath \"\""
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.651016 4703 reconciler_common.go:293] "Volume detached for volume \"bundle\" (UniqueName: \"kubernetes.io/empty-dir/b77f27da-6dec-4980-87d9-7e31b34e413d-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 13:10:19 crc kubenswrapper[4703]: I0202 13:10:19.651045 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sjkds\" (UniqueName: \"kubernetes.io/projected/b77f27da-6dec-4980-87d9-7e31b34e413d-kube-api-access-sjkds\") on node \"crc\" DevicePath \"\""
Feb 02 13:10:20 crc kubenswrapper[4703]: I0202 13:10:20.095657 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s" event={"ID":"b77f27da-6dec-4980-87d9-7e31b34e413d","Type":"ContainerDied","Data":"ab194fc08338f1228f0d354a5f2bb5938c3d7690f391c08f9a914488cc487033"}
Feb 02 13:10:20 crc kubenswrapper[4703]: I0202 13:10:20.095711 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="ab194fc08338f1228f0d354a5f2bb5938c3d7690f391c08f9a914488cc487033"
Feb 02 13:10:20 crc kubenswrapper[4703]: I0202 13:10:20.095742 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"
Feb 02 13:10:20 crc kubenswrapper[4703]: E0202 13:10:20.133785 4703 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb77f27da_6dec_4980_87d9_7e31b34e413d.slice/crio-ab194fc08338f1228f0d354a5f2bb5938c3d7690f391c08f9a914488cc487033\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podb77f27da_6dec_4980_87d9_7e31b34e413d.slice\": RecentStats: unable to find data in memory cache]"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.608878 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"]
Feb 02 13:10:34 crc kubenswrapper[4703]: E0202 13:10:34.609619 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="pull"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.609635 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="pull"
Feb 02 13:10:34 crc kubenswrapper[4703]: E0202 13:10:34.609647 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="util"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.609652 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="util"
Feb 02 13:10:34 crc kubenswrapper[4703]: E0202 13:10:34.609662 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="extract"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.609668 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="extract"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.609788 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" containerName="extract"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.610212 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.612494 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-service-cert"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.612843 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openstack-operators"/"manila-operator-controller-manager-dockercfg-fkrhv"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.626328 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"]
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.700209 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-apiservice-cert\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.700681 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rmxz7\" (UniqueName: \"kubernetes.io/projected/e9cf77c3-92e1-44dd-8390-be3e5845b011-kube-api-access-rmxz7\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.700921 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-webhook-cert\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.802200 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rmxz7\" (UniqueName: \"kubernetes.io/projected/e9cf77c3-92e1-44dd-8390-be3e5845b011-kube-api-access-rmxz7\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.802299 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-webhook-cert\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.802354 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-apiservice-cert\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.808239 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-apiservice-cert\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.810492 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-webhook-cert\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.818024 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rmxz7\" (UniqueName: \"kubernetes.io/projected/e9cf77c3-92e1-44dd-8390-be3e5845b011-kube-api-access-rmxz7\") pod \"manila-operator-controller-manager-f55bcb75d-7r4kb\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:34 crc kubenswrapper[4703]: I0202 13:10:34.933646 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:35 crc kubenswrapper[4703]: I0202 13:10:35.358135 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"]
Feb 02 13:10:36 crc kubenswrapper[4703]: I0202 13:10:36.229566 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" event={"ID":"e9cf77c3-92e1-44dd-8390-be3e5845b011","Type":"ContainerStarted","Data":"5866e11d4c4ea4a29db85693e28171aa41996f173fe51d63c0b10c5837c7f884"}
Feb 02 13:10:37 crc kubenswrapper[4703]: I0202 13:10:37.238756 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" event={"ID":"e9cf77c3-92e1-44dd-8390-be3e5845b011","Type":"ContainerStarted","Data":"fb0c5de87af8b6ad2c512285953b34ad44a6b93455ee84200dcb735f64300d9c"}
Feb 02 13:10:37 crc kubenswrapper[4703]: I0202 13:10:37.239105 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:37 crc kubenswrapper[4703]: I0202 13:10:37.263109 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" podStartSLOduration=2.233568176 podStartE2EDuration="3.263090643s" podCreationTimestamp="2026-02-02 13:10:34 +0000 UTC" firstStartedPulling="2026-02-02 13:10:35.373696716 +0000 UTC m=+1162.388904250" lastFinishedPulling="2026-02-02 13:10:36.403219183 +0000 UTC m=+1163.418426717" observedRunningTime="2026-02-02 13:10:37.257755241 +0000 UTC m=+1164.272962765" watchObservedRunningTime="2026-02-02 13:10:37.263090643 +0000 UTC m=+1164.278298177"
Feb 02 13:10:44 crc kubenswrapper[4703]: I0202 13:10:44.941049 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.334126 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-9697-account-create-update-4snxw"]
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.335360 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.339787 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-db-secret"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.344705 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-create-qmnbh"]
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.346296 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-qmnbh"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.361522 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-9697-account-create-update-4snxw"]
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.371609 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-qmnbh"]
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.373451 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-wmflt\" (UniqueName: \"kubernetes.io/projected/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-kube-api-access-wmflt\") pod \"manila-9697-account-create-update-4snxw\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.373526 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/068b7b7f-2185-4928-adc3-99bd78652039-operator-scripts\") pod \"manila-db-create-qmnbh\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " pod="manila-kuttl-tests/manila-db-create-qmnbh"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.373567 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-dkx7l\" (UniqueName: \"kubernetes.io/projected/068b7b7f-2185-4928-adc3-99bd78652039-kube-api-access-dkx7l\") pod \"manila-db-create-qmnbh\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " pod="manila-kuttl-tests/manila-db-create-qmnbh"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.373603 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-operator-scripts\") pod \"manila-9697-account-create-update-4snxw\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw"
Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.474910 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wmflt\" (UniqueName: \"kubernetes.io/projected/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-kube-api-access-wmflt\") pod \"manila-9697-account-create-update-4snxw\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw"
Feb 02
13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.475030 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/068b7b7f-2185-4928-adc3-99bd78652039-operator-scripts\") pod \"manila-db-create-qmnbh\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.475084 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-dkx7l\" (UniqueName: \"kubernetes.io/projected/068b7b7f-2185-4928-adc3-99bd78652039-kube-api-access-dkx7l\") pod \"manila-db-create-qmnbh\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.475141 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-operator-scripts\") pod \"manila-9697-account-create-update-4snxw\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.475873 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/068b7b7f-2185-4928-adc3-99bd78652039-operator-scripts\") pod \"manila-db-create-qmnbh\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.476605 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-operator-scripts\") pod \"manila-9697-account-create-update-4snxw\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.498538 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wmflt\" (UniqueName: \"kubernetes.io/projected/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-kube-api-access-wmflt\") pod \"manila-9697-account-create-update-4snxw\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.505313 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-dkx7l\" (UniqueName: \"kubernetes.io/projected/068b7b7f-2185-4928-adc3-99bd78652039-kube-api-access-dkx7l\") pod \"manila-db-create-qmnbh\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.672492 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" Feb 02 13:10:46 crc kubenswrapper[4703]: I0202 13:10:46.680976 4703 util.go:30] "No sandbox for pod can be found. 
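
The three reconciler phases interleaved above — "operationExecutor.VerifyControllerAttachedVolume started" (reconciler_common.go:245), "operationExecutor.MountVolume started" (reconciler_common.go:218), and "MountVolume.SetUp succeeded" (operation_generator.go:637) — trace each volume from desired-state registration to a completed mount. A minimal sketch for following those phases per volume when reading a journal like this one; the regexes are assumptions about the klog message text shown here, not any kubelet API:

```go
// volphase.go -- follow volume reconcile phases in a kubelet journal.
package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

var (
	// Message text as it appears in this journal, with klog-escaped quotes.
	startRe  = regexp.MustCompile(`operationExecutor\.(\w+) started for volume \\"([^\\"]+)\\"`)
	setupRe  = regexp.MustCompile(`MountVolume\.SetUp succeeded for volume \\"([^\\"]+)\\"`)
	detachRe = regexp.MustCompile(`Volume detached for volume \\"([^\\"]+)\\"`)
)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1<<20), 1<<20) // journal lines can be long
	for sc.Scan() {
		line := sc.Text()
		switch {
		case setupRe.MatchString(line):
			fmt.Println(setupRe.FindStringSubmatch(line)[1], "-> SetUp succeeded")
		case detachRe.MatchString(line):
			fmt.Println(detachRe.FindStringSubmatch(line)[1], "-> detached")
		case startRe.MatchString(line):
			m := startRe.FindStringSubmatch(line)
			fmt.Println(m[2], "->", m[1], "started")
		}
	}
}
```

Fed something like `journalctl --no-pager | go run volphase.go` (the unit name varies; on this node the kubelet logs as kubenswrapper), it prints one phase transition per matching line.
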
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:47 crc kubenswrapper[4703]: I0202 13:10:47.121914 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-qmnbh"] Feb 02 13:10:47 crc kubenswrapper[4703]: I0202 13:10:47.187094 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-9697-account-create-update-4snxw"] Feb 02 13:10:47 crc kubenswrapper[4703]: W0202 13:10:47.191379 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc31a25e9_58d5_45c1_b62d_eaf54d4d3766.slice/crio-cbebabe7b4d2546f57eb6e93474982e166757475ca6199db919e79d7d29beee3 WatchSource:0}: Error finding container cbebabe7b4d2546f57eb6e93474982e166757475ca6199db919e79d7d29beee3: Status 404 returned error can't find the container with id cbebabe7b4d2546f57eb6e93474982e166757475ca6199db919e79d7d29beee3 Feb 02 13:10:47 crc kubenswrapper[4703]: I0202 13:10:47.310011 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-qmnbh" event={"ID":"068b7b7f-2185-4928-adc3-99bd78652039","Type":"ContainerStarted","Data":"2e6ca297674ed59cad2e2be0d777b7b349f8ccacd0eaf527db3ee4008e8d5a2c"} Feb 02 13:10:47 crc kubenswrapper[4703]: I0202 13:10:47.310390 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-qmnbh" event={"ID":"068b7b7f-2185-4928-adc3-99bd78652039","Type":"ContainerStarted","Data":"f9cd8abf47502d3c005f7d34944910bdf8e7b159978c4e34738b9bcb457b09b0"} Feb 02 13:10:47 crc kubenswrapper[4703]: I0202 13:10:47.311622 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" event={"ID":"c31a25e9-58d5-45c1-b62d-eaf54d4d3766","Type":"ContainerStarted","Data":"cbebabe7b4d2546f57eb6e93474982e166757475ca6199db919e79d7d29beee3"} Feb 02 13:10:47 crc kubenswrapper[4703]: I0202 13:10:47.328880 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-create-qmnbh" podStartSLOduration=1.3288620899999999 podStartE2EDuration="1.32886209s" podCreationTimestamp="2026-02-02 13:10:46 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:10:47.325163575 +0000 UTC m=+1174.340371109" watchObservedRunningTime="2026-02-02 13:10:47.32886209 +0000 UTC m=+1174.344069624" Feb 02 13:10:48 crc kubenswrapper[4703]: I0202 13:10:48.320068 4703 generic.go:334] "Generic (PLEG): container finished" podID="068b7b7f-2185-4928-adc3-99bd78652039" containerID="2e6ca297674ed59cad2e2be0d777b7b349f8ccacd0eaf527db3ee4008e8d5a2c" exitCode=0 Feb 02 13:10:48 crc kubenswrapper[4703]: I0202 13:10:48.320123 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-qmnbh" event={"ID":"068b7b7f-2185-4928-adc3-99bd78652039","Type":"ContainerDied","Data":"2e6ca297674ed59cad2e2be0d777b7b349f8ccacd0eaf527db3ee4008e8d5a2c"} Feb 02 13:10:48 crc kubenswrapper[4703]: I0202 13:10:48.321980 4703 generic.go:334] "Generic (PLEG): container finished" podID="c31a25e9-58d5-45c1-b62d-eaf54d4d3766" containerID="a66d59070de56715719c365d9b133db8a9c42bf1972ab036a6a12c0491e0d614" exitCode=0 Feb 02 13:10:48 crc kubenswrapper[4703]: I0202 13:10:48.322014 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" 
event={"ID":"c31a25e9-58d5-45c1-b62d-eaf54d4d3766","Type":"ContainerDied","Data":"a66d59070de56715719c365d9b133db8a9c42bf1972ab036a6a12c0491e0d614"} Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.689003 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.755149 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.817900 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/068b7b7f-2185-4928-adc3-99bd78652039-operator-scripts\") pod \"068b7b7f-2185-4928-adc3-99bd78652039\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.818224 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-operator-scripts\") pod \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.818332 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wmflt\" (UniqueName: \"kubernetes.io/projected/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-kube-api-access-wmflt\") pod \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\" (UID: \"c31a25e9-58d5-45c1-b62d-eaf54d4d3766\") " Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.818415 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dkx7l\" (UniqueName: \"kubernetes.io/projected/068b7b7f-2185-4928-adc3-99bd78652039-kube-api-access-dkx7l\") pod \"068b7b7f-2185-4928-adc3-99bd78652039\" (UID: \"068b7b7f-2185-4928-adc3-99bd78652039\") " Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.818823 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/068b7b7f-2185-4928-adc3-99bd78652039-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "068b7b7f-2185-4928-adc3-99bd78652039" (UID: "068b7b7f-2185-4928-adc3-99bd78652039"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.819188 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "c31a25e9-58d5-45c1-b62d-eaf54d4d3766" (UID: "c31a25e9-58d5-45c1-b62d-eaf54d4d3766"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.825818 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-kube-api-access-wmflt" (OuterVolumeSpecName: "kube-api-access-wmflt") pod "c31a25e9-58d5-45c1-b62d-eaf54d4d3766" (UID: "c31a25e9-58d5-45c1-b62d-eaf54d4d3766"). InnerVolumeSpecName "kube-api-access-wmflt". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.826376 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/068b7b7f-2185-4928-adc3-99bd78652039-kube-api-access-dkx7l" (OuterVolumeSpecName: "kube-api-access-dkx7l") pod "068b7b7f-2185-4928-adc3-99bd78652039" (UID: "068b7b7f-2185-4928-adc3-99bd78652039"). InnerVolumeSpecName "kube-api-access-dkx7l". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.919922 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/068b7b7f-2185-4928-adc3-99bd78652039-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.920392 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.920466 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wmflt\" (UniqueName: \"kubernetes.io/projected/c31a25e9-58d5-45c1-b62d-eaf54d4d3766-kube-api-access-wmflt\") on node \"crc\" DevicePath \"\"" Feb 02 13:10:49 crc kubenswrapper[4703]: I0202 13:10:49.920532 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dkx7l\" (UniqueName: \"kubernetes.io/projected/068b7b7f-2185-4928-adc3-99bd78652039-kube-api-access-dkx7l\") on node \"crc\" DevicePath \"\"" Feb 02 13:10:50 crc kubenswrapper[4703]: I0202 13:10:50.336530 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" Feb 02 13:10:50 crc kubenswrapper[4703]: I0202 13:10:50.336735 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-9697-account-create-update-4snxw" event={"ID":"c31a25e9-58d5-45c1-b62d-eaf54d4d3766","Type":"ContainerDied","Data":"cbebabe7b4d2546f57eb6e93474982e166757475ca6199db919e79d7d29beee3"} Feb 02 13:10:50 crc kubenswrapper[4703]: I0202 13:10:50.336960 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="cbebabe7b4d2546f57eb6e93474982e166757475ca6199db919e79d7d29beee3" Feb 02 13:10:50 crc kubenswrapper[4703]: I0202 13:10:50.338629 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-qmnbh" event={"ID":"068b7b7f-2185-4928-adc3-99bd78652039","Type":"ContainerDied","Data":"f9cd8abf47502d3c005f7d34944910bdf8e7b159978c4e34738b9bcb457b09b0"} Feb 02 13:10:50 crc kubenswrapper[4703]: I0202 13:10:50.338656 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f9cd8abf47502d3c005f7d34944910bdf8e7b159978c4e34738b9bcb457b09b0" Feb 02 13:10:50 crc kubenswrapper[4703]: I0202 13:10:50.338892 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-qmnbh" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.676394 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-sync-qkvl5"] Feb 02 13:10:51 crc kubenswrapper[4703]: E0202 13:10:51.676659 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="068b7b7f-2185-4928-adc3-99bd78652039" containerName="mariadb-database-create" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.676675 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="068b7b7f-2185-4928-adc3-99bd78652039" containerName="mariadb-database-create" Feb 02 13:10:51 crc kubenswrapper[4703]: E0202 13:10:51.676700 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c31a25e9-58d5-45c1-b62d-eaf54d4d3766" containerName="mariadb-account-create-update" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.676706 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c31a25e9-58d5-45c1-b62d-eaf54d4d3766" containerName="mariadb-account-create-update" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.676827 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="c31a25e9-58d5-45c1-b62d-eaf54d4d3766" containerName="mariadb-account-create-update" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.676848 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="068b7b7f-2185-4928-adc3-99bd78652039" containerName="mariadb-database-create" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.677238 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.679546 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.679854 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-x8qmg" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.689905 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-qkvl5"] Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.849776 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9xf62\" (UniqueName: \"kubernetes.io/projected/92c3b050-25a6-43c6-ac08-1511284f2b96-kube-api-access-9xf62\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.849945 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-config-data\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.849985 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-job-config-data\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.956984 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"kube-api-access-9xf62\" (UniqueName: \"kubernetes.io/projected/92c3b050-25a6-43c6-ac08-1511284f2b96-kube-api-access-9xf62\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.957155 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-config-data\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.957180 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-job-config-data\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.966836 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-job-config-data\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.967906 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-config-data\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:51 crc kubenswrapper[4703]: I0202 13:10:51.978587 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9xf62\" (UniqueName: \"kubernetes.io/projected/92c3b050-25a6-43c6-ac08-1511284f2b96-kube-api-access-9xf62\") pod \"manila-db-sync-qkvl5\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:52 crc kubenswrapper[4703]: I0202 13:10:52.004656 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:10:52 crc kubenswrapper[4703]: I0202 13:10:52.477096 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-qkvl5"] Feb 02 13:10:52 crc kubenswrapper[4703]: W0202 13:10:52.485753 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod92c3b050_25a6_43c6_ac08_1511284f2b96.slice/crio-621b534e71e07241c3d8848480c42c21ebad418de159d443fd4d6e0c140953ff WatchSource:0}: Error finding container 621b534e71e07241c3d8848480c42c21ebad418de159d443fd4d6e0c140953ff: Status 404 returned error can't find the container with id 621b534e71e07241c3d8848480c42c21ebad418de159d443fd4d6e0c140953ff Feb 02 13:10:53 crc kubenswrapper[4703]: I0202 13:10:53.380740 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-qkvl5" event={"ID":"92c3b050-25a6-43c6-ac08-1511284f2b96","Type":"ContainerStarted","Data":"621b534e71e07241c3d8848480c42c21ebad418de159d443fd4d6e0c140953ff"} Feb 02 13:10:57 crc kubenswrapper[4703]: I0202 13:10:57.408218 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-qkvl5" event={"ID":"92c3b050-25a6-43c6-ac08-1511284f2b96","Type":"ContainerStarted","Data":"bc428931cc4afb49b9715e8947ae0f071ff5fc6b16d99c9fa5a91440b2ed6440"} Feb 02 13:10:57 crc kubenswrapper[4703]: I0202 13:10:57.424068 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-sync-qkvl5" podStartSLOduration=2.285290124 podStartE2EDuration="6.424047907s" podCreationTimestamp="2026-02-02 13:10:51 +0000 UTC" firstStartedPulling="2026-02-02 13:10:52.487912925 +0000 UTC m=+1179.503120459" lastFinishedPulling="2026-02-02 13:10:56.626670708 +0000 UTC m=+1183.641878242" observedRunningTime="2026-02-02 13:10:57.423713608 +0000 UTC m=+1184.438921152" watchObservedRunningTime="2026-02-02 13:10:57.424047907 +0000 UTC m=+1184.439255451" Feb 02 13:11:10 crc kubenswrapper[4703]: I0202 13:11:10.504917 4703 generic.go:334] "Generic (PLEG): container finished" podID="92c3b050-25a6-43c6-ac08-1511284f2b96" containerID="bc428931cc4afb49b9715e8947ae0f071ff5fc6b16d99c9fa5a91440b2ed6440" exitCode=0 Feb 02 13:11:10 crc kubenswrapper[4703]: I0202 13:11:10.505001 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-qkvl5" event={"ID":"92c3b050-25a6-43c6-ac08-1511284f2b96","Type":"ContainerDied","Data":"bc428931cc4afb49b9715e8947ae0f071ff5fc6b16d99c9fa5a91440b2ed6440"} Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.774843 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.854749 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-job-config-data\") pod \"92c3b050-25a6-43c6-ac08-1511284f2b96\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.854823 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-config-data\") pod \"92c3b050-25a6-43c6-ac08-1511284f2b96\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.854880 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9xf62\" (UniqueName: \"kubernetes.io/projected/92c3b050-25a6-43c6-ac08-1511284f2b96-kube-api-access-9xf62\") pod \"92c3b050-25a6-43c6-ac08-1511284f2b96\" (UID: \"92c3b050-25a6-43c6-ac08-1511284f2b96\") " Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.865693 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "92c3b050-25a6-43c6-ac08-1511284f2b96" (UID: "92c3b050-25a6-43c6-ac08-1511284f2b96"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.865926 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92c3b050-25a6-43c6-ac08-1511284f2b96-kube-api-access-9xf62" (OuterVolumeSpecName: "kube-api-access-9xf62") pod "92c3b050-25a6-43c6-ac08-1511284f2b96" (UID: "92c3b050-25a6-43c6-ac08-1511284f2b96"). InnerVolumeSpecName "kube-api-access-9xf62". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.867826 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-config-data" (OuterVolumeSpecName: "config-data") pod "92c3b050-25a6-43c6-ac08-1511284f2b96" (UID: "92c3b050-25a6-43c6-ac08-1511284f2b96"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.956277 4703 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-job-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.956315 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/92c3b050-25a6-43c6-ac08-1511284f2b96-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:11:11 crc kubenswrapper[4703]: I0202 13:11:11.956323 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9xf62\" (UniqueName: \"kubernetes.io/projected/92c3b050-25a6-43c6-ac08-1511284f2b96-kube-api-access-9xf62\") on node \"crc\" DevicePath \"\"" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.518814 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-qkvl5" event={"ID":"92c3b050-25a6-43c6-ac08-1511284f2b96","Type":"ContainerDied","Data":"621b534e71e07241c3d8848480c42c21ebad418de159d443fd4d6e0c140953ff"} Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.518857 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="621b534e71e07241c3d8848480c42c21ebad418de159d443fd4d6e0c140953ff" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.518856 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-qkvl5" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.754755 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:11:12 crc kubenswrapper[4703]: E0202 13:11:12.755109 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92c3b050-25a6-43c6-ac08-1511284f2b96" containerName="manila-db-sync" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.755130 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="92c3b050-25a6-43c6-ac08-1511284f2b96" containerName="manila-db-sync" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.755317 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="92c3b050-25a6-43c6-ac08-1511284f2b96" containerName="manila-db-sync" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.760850 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.776240 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scripts" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.776249 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-x8qmg" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.776616 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share0-config-data" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.777015 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.778018 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"ceph-conf-files" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.812430 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.825577 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.825999 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.828795 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scheduler-config-data" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.838985 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869085 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869175 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869228 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869267 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869345 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume 
started for volume \"kube-api-access-wkjpj\" (UniqueName: \"kubernetes.io/projected/216a3968-f8c5-466f-bbd3-22447464e608-kube-api-access-wkjpj\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869413 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-scripts\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.869434 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-ceph\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.970704 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.970847 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.970872 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-scripts\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.970794 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.970942 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.970961 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-pfs2n\" (UniqueName: \"kubernetes.io/projected/3d46d7ce-93f4-4247-9d5d-800618bfd04f-kube-api-access-pfs2n\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971251 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: 
\"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971317 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971338 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-wkjpj\" (UniqueName: \"kubernetes.io/projected/216a3968-f8c5-466f-bbd3-22447464e608-kube-api-access-wkjpj\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971380 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971405 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-scripts\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971428 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-ceph\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971454 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d46d7ce-93f4-4247-9d5d-800618bfd04f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.971484 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.975889 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.976171 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data\") pod \"manila-share-share0-0\" (UID: 
\"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.976629 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-scripts\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.976781 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-ceph\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:12 crc kubenswrapper[4703]: I0202 13:11:12.991506 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-wkjpj\" (UniqueName: \"kubernetes.io/projected/216a3968-f8c5-466f-bbd3-22447464e608-kube-api-access-wkjpj\") pod \"manila-share-share0-0\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.048334 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.049471 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: W0202 13:11:13.054930 4703 reflector.go:561] object-"manila-kuttl-tests"/"manila-api-config-data": failed to list *v1.Secret: secrets "manila-api-config-data" is forbidden: User "system:node:crc" cannot list resource "secrets" in API group "" in the namespace "manila-kuttl-tests": no relationship found between node 'crc' and this object Feb 02 13:11:13 crc kubenswrapper[4703]: E0202 13:11:13.054996 4703 reflector.go:158] "Unhandled Error" err="object-\"manila-kuttl-tests\"/\"manila-api-config-data\": Failed to watch *v1.Secret: failed to list *v1.Secret: secrets \"manila-api-config-data\" is forbidden: User \"system:node:crc\" cannot list resource \"secrets\" in API group \"\" in the namespace \"manila-kuttl-tests\": no relationship found between node 'crc' and this object" logger="UnhandledError" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.072972 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-scripts\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.073033 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-pfs2n\" (UniqueName: \"kubernetes.io/projected/3d46d7ce-93f4-4247-9d5d-800618bfd04f-kube-api-access-pfs2n\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.073084 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 
13:11:13.073118 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d46d7ce-93f4-4247-9d5d-800618bfd04f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.073139 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.073543 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d46d7ce-93f4-4247-9d5d-800618bfd04f-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.075981 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.077389 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.077715 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.077980 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.098279 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-scripts\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.106634 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-pfs2n\" (UniqueName: \"kubernetes.io/projected/3d46d7ce-93f4-4247-9d5d-800618bfd04f-kube-api-access-pfs2n\") pod \"manila-scheduler-0\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.162647 4703 util.go:30] "No sandbox for pod can be found. 
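
The reflector warning above ("no relationship found between node 'crc' and this object") is the node authorizer at work: the kubelet may read manila-api-config-data only once the scheduled manila-api-0 pod referencing it is registered, so the first list is forbidden, the mount waits on the secret cache, and a later line shows the cache populated. A plain-Go model of that wait-for-cache gate, matching the "timed out waiting for the condition" wording seen shortly after (the real kubelet uses client-go informers, not this loop):

```go
// Model of "wait for the secret cache to sync before mounting".
package main

import (
	"context"
	"errors"
	"fmt"
	"sync/atomic"
	"time"
)

// waitForCondition polls cond until it returns true or ctx expires.
func waitForCondition(ctx context.Context, interval time.Duration, cond func() bool) error {
	t := time.NewTicker(interval)
	defer t.Stop()
	for {
		if cond() {
			return nil
		}
		select {
		case <-ctx.Done():
			return errors.New("timed out waiting for the condition")
		case <-t.C:
		}
	}
}

func main() {
	var synced atomic.Bool
	go func() { time.Sleep(300 * time.Millisecond); synced.Store(true) }() // reflector catches up
	ctx, cancel := context.WithTimeout(context.Background(), time.Second)
	defer cancel()
	fmt.Println("secret cache sync:", waitForCondition(ctx, 50*time.Millisecond, synced.Load))
}
```
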
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.174671 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.174859 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e0fa34-87a7-481c-bd0e-a281ae56e369-logs\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.174964 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.175077 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67e0fa34-87a7-481c-bd0e-a281ae56e369-etc-machine-id\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.175169 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c4mpd\" (UniqueName: \"kubernetes.io/projected/67e0fa34-87a7-481c-bd0e-a281ae56e369-kube-api-access-c4mpd\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.175240 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-scripts\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.276554 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-scripts\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.276926 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.277125 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e0fa34-87a7-481c-bd0e-a281ae56e369-logs\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.277154 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for 
volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.277564 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e0fa34-87a7-481c-bd0e-a281ae56e369-logs\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.277636 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67e0fa34-87a7-481c-bd0e-a281ae56e369-etc-machine-id\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.277802 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c4mpd\" (UniqueName: \"kubernetes.io/projected/67e0fa34-87a7-481c-bd0e-a281ae56e369-kube-api-access-c4mpd\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.277755 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67e0fa34-87a7-481c-bd0e-a281ae56e369-etc-machine-id\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.284652 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.285236 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-scripts\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.300013 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c4mpd\" (UniqueName: \"kubernetes.io/projected/67e0fa34-87a7-481c-bd0e-a281ae56e369-kube-api-access-c4mpd\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.582645 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:11:13 crc kubenswrapper[4703]: I0202 13:11:13.662117 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:11:13 crc kubenswrapper[4703]: W0202 13:11:13.671925 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod3d46d7ce_93f4_4247_9d5d_800618bfd04f.slice/crio-27326a00b6fee502023de7b1cf616af859185343a2d20a21b812503481294b00 WatchSource:0}: Error finding container 27326a00b6fee502023de7b1cf616af859185343a2d20a21b812503481294b00: Status 404 returned error can't find the container with id 
27326a00b6fee502023de7b1cf616af859185343a2d20a21b812503481294b00 Feb 02 13:11:14 crc kubenswrapper[4703]: E0202 13:11:14.277614 4703 secret.go:188] Couldn't get secret manila-kuttl-tests/manila-api-config-data: failed to sync secret cache: timed out waiting for the condition Feb 02 13:11:14 crc kubenswrapper[4703]: E0202 13:11:14.277980 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom podName:67e0fa34-87a7-481c-bd0e-a281ae56e369 nodeName:}" failed. No retries permitted until 2026-02-02 13:11:14.777947176 +0000 UTC m=+1201.793154710 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "config-data-custom" (UniqueName: "kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom") pod "manila-api-0" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369") : failed to sync secret cache: timed out waiting for the condition Feb 02 13:11:14 crc kubenswrapper[4703]: I0202 13:11:14.282850 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-api-config-data" Feb 02 13:11:14 crc kubenswrapper[4703]: I0202 13:11:14.533954 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"216a3968-f8c5-466f-bbd3-22447464e608","Type":"ContainerStarted","Data":"987fa0fda2d45a4f28f112ec707cce84c3faf9da0a2548370018b41538d46b97"} Feb 02 13:11:14 crc kubenswrapper[4703]: I0202 13:11:14.535726 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"3d46d7ce-93f4-4247-9d5d-800618bfd04f","Type":"ContainerStarted","Data":"27326a00b6fee502023de7b1cf616af859185343a2d20a21b812503481294b00"} Feb 02 13:11:14 crc kubenswrapper[4703]: I0202 13:11:14.798495 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:14 crc kubenswrapper[4703]: I0202 13:11:14.802822 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom\") pod \"manila-api-0\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:14 crc kubenswrapper[4703]: I0202 13:11:14.866749 4703 util.go:30] "No sandbox for pod can be found. 
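
The failed mount above is retried and succeeds half a second later: "No retries permitted until ... (durationBeforeRetry 500ms)" is the kubelet's per-volume exponential backoff, which blocks the operation until the deadline, then retries with a growing delay. A minimal sketch of that policy follows; the 500ms initial delay comes from this log, while the doubling factor and the cap are assumptions, not values read out of the kubelet source.

    package main

    import (
        "fmt"
        "time"
    )

    const (
        initialBackoff = 500 * time.Millisecond // matches "durationBeforeRetry 500ms" above
        backoffFactor  = 2.0                    // assumed growth factor
        maxBackoff     = 2*time.Minute + 2*time.Second // assumed cap
    )

    // nextBackoff returns the delay to wait before the next retry of a
    // failed volume operation, doubling up to a fixed cap.
    func nextBackoff(cur time.Duration) time.Duration {
        if cur == 0 {
            return initialBackoff
        }
        next := time.Duration(float64(cur) * backoffFactor)
        if next > maxBackoff {
            return maxBackoff
        }
        return next
    }

    func main() {
        d := time.Duration(0)
        for i := 0; i < 5; i++ {
            d = nextBackoff(d)
            fmt.Println(d) // 500ms 1s 2s 4s 8s
        }
    }

Here the very first retry succeeds (the "Caches populated" entry shows the secret informer caught up), so the backoff never grows past 500ms.
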
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.130130 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:11:15 crc kubenswrapper[4703]: W0202 13:11:15.152132 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod67e0fa34_87a7_481c_bd0e_a281ae56e369.slice/crio-c9ded08cb8967a93f1a6fb77498b99ef275e62952d82d916484ecdaad95e93e3 WatchSource:0}: Error finding container c9ded08cb8967a93f1a6fb77498b99ef275e62952d82d916484ecdaad95e93e3: Status 404 returned error can't find the container with id c9ded08cb8967a93f1a6fb77498b99ef275e62952d82d916484ecdaad95e93e3 Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.549267 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"67e0fa34-87a7-481c-bd0e-a281ae56e369","Type":"ContainerStarted","Data":"c9ded08cb8967a93f1a6fb77498b99ef275e62952d82d916484ecdaad95e93e3"} Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.550881 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"3d46d7ce-93f4-4247-9d5d-800618bfd04f","Type":"ContainerStarted","Data":"768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718"} Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.550954 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"3d46d7ce-93f4-4247-9d5d-800618bfd04f","Type":"ContainerStarted","Data":"80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d"} Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.571144 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-0" podStartSLOduration=2.8693059720000003 podStartE2EDuration="3.571130237s" podCreationTimestamp="2026-02-02 13:11:12 +0000 UTC" firstStartedPulling="2026-02-02 13:11:13.674012571 +0000 UTC m=+1200.689220095" lastFinishedPulling="2026-02-02 13:11:14.375836826 +0000 UTC m=+1201.391044360" observedRunningTime="2026-02-02 13:11:15.570752877 +0000 UTC m=+1202.585960421" watchObservedRunningTime="2026-02-02 13:11:15.571130237 +0000 UTC m=+1202.586337771" Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.984971 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:11:15 crc kubenswrapper[4703]: I0202 13:11:15.985437 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:11:16 crc kubenswrapper[4703]: I0202 13:11:16.565165 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"67e0fa34-87a7-481c-bd0e-a281ae56e369","Type":"ContainerStarted","Data":"bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f"} Feb 02 13:11:16 crc kubenswrapper[4703]: I0202 13:11:16.565598 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" 
event={"ID":"67e0fa34-87a7-481c-bd0e-a281ae56e369","Type":"ContainerStarted","Data":"f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74"} Feb 02 13:11:16 crc kubenswrapper[4703]: I0202 13:11:16.565637 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:19 crc kubenswrapper[4703]: I0202 13:11:19.586590 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"216a3968-f8c5-466f-bbd3-22447464e608","Type":"ContainerStarted","Data":"ea0bcdeb2fef6364f147105cf4f3aaaef7eb70da891e4a9809aa3f73a1e5ee1d"} Feb 02 13:11:20 crc kubenswrapper[4703]: I0202 13:11:20.604120 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"216a3968-f8c5-466f-bbd3-22447464e608","Type":"ContainerStarted","Data":"567ded5ea4f19c852794586504b183e6a837fbf9b62a5ec6e12a3b16689fc8a1"} Feb 02 13:11:20 crc kubenswrapper[4703]: I0202 13:11:20.629835 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share0-0" podStartSLOduration=3.225922467 podStartE2EDuration="8.629815621s" podCreationTimestamp="2026-02-02 13:11:12 +0000 UTC" firstStartedPulling="2026-02-02 13:11:13.588433862 +0000 UTC m=+1200.603641386" lastFinishedPulling="2026-02-02 13:11:18.992327006 +0000 UTC m=+1206.007534540" observedRunningTime="2026-02-02 13:11:20.62309592 +0000 UTC m=+1207.638303464" watchObservedRunningTime="2026-02-02 13:11:20.629815621 +0000 UTC m=+1207.645023155" Feb 02 13:11:20 crc kubenswrapper[4703]: I0202 13:11:20.630756 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-0" podStartSLOduration=7.630747568 podStartE2EDuration="7.630747568s" podCreationTimestamp="2026-02-02 13:11:13 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:11:16.588197198 +0000 UTC m=+1203.603404732" watchObservedRunningTime="2026-02-02 13:11:20.630747568 +0000 UTC m=+1207.645955102" Feb 02 13:11:23 crc kubenswrapper[4703]: I0202 13:11:23.078552 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:23 crc kubenswrapper[4703]: I0202 13:11:23.163805 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:34 crc kubenswrapper[4703]: I0202 13:11:34.682837 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:11:34 crc kubenswrapper[4703]: I0202 13:11:34.920107 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:11:36 crc kubenswrapper[4703]: I0202 13:11:36.173405 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.327495 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-1"] Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.329741 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.340614 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-2"] Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.342116 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.354195 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3dfad47-2e05-44c2-a196-88fd067111f0-etc-machine-id\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.354951 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kblmq\" (UniqueName: \"kubernetes.io/projected/a3dfad47-2e05-44c2-a196-88fd067111f0-kube-api-access-kblmq\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355089 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-scripts\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355190 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355317 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355428 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dfad47-2e05-44c2-a196-88fd067111f0-logs\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355384 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355665 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rlwnl\" (UniqueName: \"kubernetes.io/projected/b6f2e96b-baaf-4e76-8514-03639d833a55-kube-api-access-rlwnl\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355761 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6f2e96b-baaf-4e76-8514-03639d833a55-etc-machine-id\") pod \"manila-api-1\" (UID: 
\"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355870 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data-custom\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.355983 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data-custom\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.356086 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6f2e96b-baaf-4e76-8514-03639d833a55-logs\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.356640 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-scripts\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.365184 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.457889 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rlwnl\" (UniqueName: \"kubernetes.io/projected/b6f2e96b-baaf-4e76-8514-03639d833a55-kube-api-access-rlwnl\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.457937 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6f2e96b-baaf-4e76-8514-03639d833a55-etc-machine-id\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.457964 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data-custom\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.457979 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data-custom\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.457995 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6f2e96b-baaf-4e76-8514-03639d833a55-logs\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " 
pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458026 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-scripts\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458056 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3dfad47-2e05-44c2-a196-88fd067111f0-etc-machine-id\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458096 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kblmq\" (UniqueName: \"kubernetes.io/projected/a3dfad47-2e05-44c2-a196-88fd067111f0-kube-api-access-kblmq\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458122 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-scripts\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458138 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458158 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458174 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dfad47-2e05-44c2-a196-88fd067111f0-logs\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.458375 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3dfad47-2e05-44c2-a196-88fd067111f0-etc-machine-id\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.459700 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6f2e96b-baaf-4e76-8514-03639d833a55-etc-machine-id\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.459947 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dfad47-2e05-44c2-a196-88fd067111f0-logs\") pod \"manila-api-2\" (UID: 
\"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.459997 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6f2e96b-baaf-4e76-8514-03639d833a55-logs\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.463781 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-scripts\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.464137 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.464520 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.464595 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-scripts\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.465129 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data-custom\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.466242 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data-custom\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.475581 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rlwnl\" (UniqueName: \"kubernetes.io/projected/b6f2e96b-baaf-4e76-8514-03639d833a55-kube-api-access-rlwnl\") pod \"manila-api-1\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.487902 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kblmq\" (UniqueName: \"kubernetes.io/projected/a3dfad47-2e05-44c2-a196-88fd067111f0-kube-api-access-kblmq\") pod \"manila-api-2\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.650751 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:37 crc kubenswrapper[4703]: I0202 13:11:37.661054 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:38 crc kubenswrapper[4703]: I0202 13:11:38.086148 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Feb 02 13:11:38 crc kubenswrapper[4703]: W0202 13:11:38.089791 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda3dfad47_2e05_44c2_a196_88fd067111f0.slice/crio-aa1f65c6b0020c964771284f24f5f5b7717e81e85f00a0de3513e7aec023ad84 WatchSource:0}: Error finding container aa1f65c6b0020c964771284f24f5f5b7717e81e85f00a0de3513e7aec023ad84: Status 404 returned error can't find the container with id aa1f65c6b0020c964771284f24f5f5b7717e81e85f00a0de3513e7aec023ad84 Feb 02 13:11:38 crc kubenswrapper[4703]: I0202 13:11:38.090884 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Feb 02 13:11:38 crc kubenswrapper[4703]: W0202 13:11:38.098036 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podb6f2e96b_baaf_4e76_8514_03639d833a55.slice/crio-3a54ec3d42b5fb229485ef531bf13845969bb5188d22bd53c659a708cced9d09 WatchSource:0}: Error finding container 3a54ec3d42b5fb229485ef531bf13845969bb5188d22bd53c659a708cced9d09: Status 404 returned error can't find the container with id 3a54ec3d42b5fb229485ef531bf13845969bb5188d22bd53c659a708cced9d09 Feb 02 13:11:38 crc kubenswrapper[4703]: I0202 13:11:38.725418 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"b6f2e96b-baaf-4e76-8514-03639d833a55","Type":"ContainerStarted","Data":"236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0"} Feb 02 13:11:38 crc kubenswrapper[4703]: I0202 13:11:38.725983 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"b6f2e96b-baaf-4e76-8514-03639d833a55","Type":"ContainerStarted","Data":"3a54ec3d42b5fb229485ef531bf13845969bb5188d22bd53c659a708cced9d09"} Feb 02 13:11:38 crc kubenswrapper[4703]: I0202 13:11:38.728674 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"a3dfad47-2e05-44c2-a196-88fd067111f0","Type":"ContainerStarted","Data":"6a35b38dcbff9340b8967759f30e0ac64c5781dd651a9532c3b022630de1a4c3"} Feb 02 13:11:38 crc kubenswrapper[4703]: I0202 13:11:38.728730 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"a3dfad47-2e05-44c2-a196-88fd067111f0","Type":"ContainerStarted","Data":"aa1f65c6b0020c964771284f24f5f5b7717e81e85f00a0de3513e7aec023ad84"} Feb 02 13:11:39 crc kubenswrapper[4703]: I0202 13:11:39.739866 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"a3dfad47-2e05-44c2-a196-88fd067111f0","Type":"ContainerStarted","Data":"6ef6a76ecb386c4b66bd44df569d836031e562da1be69dc88621f94c722b763c"} Feb 02 13:11:39 crc kubenswrapper[4703]: I0202 13:11:39.740448 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-2" Feb 02 13:11:39 crc kubenswrapper[4703]: I0202 13:11:39.743827 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" 
event={"ID":"b6f2e96b-baaf-4e76-8514-03639d833a55","Type":"ContainerStarted","Data":"2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466"} Feb 02 13:11:39 crc kubenswrapper[4703]: I0202 13:11:39.744049 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:39 crc kubenswrapper[4703]: I0202 13:11:39.773243 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-2" podStartSLOduration=2.773214962 podStartE2EDuration="2.773214962s" podCreationTimestamp="2026-02-02 13:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:11:39.766212542 +0000 UTC m=+1226.781420076" watchObservedRunningTime="2026-02-02 13:11:39.773214962 +0000 UTC m=+1226.788422506" Feb 02 13:11:39 crc kubenswrapper[4703]: I0202 13:11:39.785421 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-1" podStartSLOduration=2.785403609 podStartE2EDuration="2.785403609s" podCreationTimestamp="2026-02-02 13:11:37 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:11:39.782213008 +0000 UTC m=+1226.797420572" watchObservedRunningTime="2026-02-02 13:11:39.785403609 +0000 UTC m=+1226.800611143" Feb 02 13:11:45 crc kubenswrapper[4703]: I0202 13:11:45.985032 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:11:45 crc kubenswrapper[4703]: I0202 13:11:45.985710 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:11:58 crc kubenswrapper[4703]: I0202 13:11:58.887405 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-1" Feb 02 13:11:58 crc kubenswrapper[4703]: I0202 13:11:58.946710 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-2" Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.190958 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.191479 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-2" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api-log" containerID="cri-o://6a35b38dcbff9340b8967759f30e0ac64c5781dd651a9532c3b022630de1a4c3" gracePeriod=30 Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.191601 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-2" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api" containerID="cri-o://6ef6a76ecb386c4b66bd44df569d836031e562da1be69dc88621f94c722b763c" gracePeriod=30 Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.201075 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" 
pods=["manila-kuttl-tests/manila-api-1"] Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.209010 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-1" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api-log" containerID="cri-o://236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0" gracePeriod=30 Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.209458 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-1" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api" containerID="cri-o://2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466" gracePeriod=30 Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.940898 4703 generic.go:334] "Generic (PLEG): container finished" podID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerID="236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0" exitCode=143 Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.941348 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"b6f2e96b-baaf-4e76-8514-03639d833a55","Type":"ContainerDied","Data":"236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0"} Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.943965 4703 generic.go:334] "Generic (PLEG): container finished" podID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerID="6a35b38dcbff9340b8967759f30e0ac64c5781dd651a9532c3b022630de1a4c3" exitCode=143 Feb 02 13:12:00 crc kubenswrapper[4703]: I0202 13:12:00.944032 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"a3dfad47-2e05-44c2-a196-88fd067111f0","Type":"ContainerDied","Data":"6a35b38dcbff9340b8967759f30e0ac64c5781dd651a9532c3b022630de1a4c3"} Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.854947 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.881127 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-scripts\") pod \"b6f2e96b-baaf-4e76-8514-03639d833a55\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.881172 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6f2e96b-baaf-4e76-8514-03639d833a55-etc-machine-id\") pod \"b6f2e96b-baaf-4e76-8514-03639d833a55\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.881199 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data-custom\") pod \"b6f2e96b-baaf-4e76-8514-03639d833a55\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.881243 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlwnl\" (UniqueName: \"kubernetes.io/projected/b6f2e96b-baaf-4e76-8514-03639d833a55-kube-api-access-rlwnl\") pod \"b6f2e96b-baaf-4e76-8514-03639d833a55\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.881291 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data\") pod \"b6f2e96b-baaf-4e76-8514-03639d833a55\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.881349 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6f2e96b-baaf-4e76-8514-03639d833a55-logs\") pod \"b6f2e96b-baaf-4e76-8514-03639d833a55\" (UID: \"b6f2e96b-baaf-4e76-8514-03639d833a55\") " Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.883349 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/b6f2e96b-baaf-4e76-8514-03639d833a55-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "b6f2e96b-baaf-4e76-8514-03639d833a55" (UID: "b6f2e96b-baaf-4e76-8514-03639d833a55"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.883442 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b6f2e96b-baaf-4e76-8514-03639d833a55-logs" (OuterVolumeSpecName: "logs") pod "b6f2e96b-baaf-4e76-8514-03639d833a55" (UID: "b6f2e96b-baaf-4e76-8514-03639d833a55"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.888680 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-scripts" (OuterVolumeSpecName: "scripts") pod "b6f2e96b-baaf-4e76-8514-03639d833a55" (UID: "b6f2e96b-baaf-4e76-8514-03639d833a55"). InnerVolumeSpecName "scripts". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.889042 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "b6f2e96b-baaf-4e76-8514-03639d833a55" (UID: "b6f2e96b-baaf-4e76-8514-03639d833a55"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.891984 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b6f2e96b-baaf-4e76-8514-03639d833a55-kube-api-access-rlwnl" (OuterVolumeSpecName: "kube-api-access-rlwnl") pod "b6f2e96b-baaf-4e76-8514-03639d833a55" (UID: "b6f2e96b-baaf-4e76-8514-03639d833a55"). InnerVolumeSpecName "kube-api-access-rlwnl". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.933315 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data" (OuterVolumeSpecName: "config-data") pod "b6f2e96b-baaf-4e76-8514-03639d833a55" (UID: "b6f2e96b-baaf-4e76-8514-03639d833a55"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.966604 4703 generic.go:334] "Generic (PLEG): container finished" podID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerID="2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466" exitCode=0 Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.966668 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"b6f2e96b-baaf-4e76-8514-03639d833a55","Type":"ContainerDied","Data":"2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466"} Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.966695 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-1" event={"ID":"b6f2e96b-baaf-4e76-8514-03639d833a55","Type":"ContainerDied","Data":"3a54ec3d42b5fb229485ef531bf13845969bb5188d22bd53c659a708cced9d09"} Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.966710 4703 scope.go:117] "RemoveContainer" containerID="2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.966828 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-1" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.972050 4703 generic.go:334] "Generic (PLEG): container finished" podID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerID="6ef6a76ecb386c4b66bd44df569d836031e562da1be69dc88621f94c722b763c" exitCode=0 Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.972097 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"a3dfad47-2e05-44c2-a196-88fd067111f0","Type":"ContainerDied","Data":"6ef6a76ecb386c4b66bd44df569d836031e562da1be69dc88621f94c722b763c"} Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.984909 4703 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/b6f2e96b-baaf-4e76-8514-03639d833a55-logs\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.984940 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.984951 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/b6f2e96b-baaf-4e76-8514-03639d833a55-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.984960 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.984970 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlwnl\" (UniqueName: \"kubernetes.io/projected/b6f2e96b-baaf-4e76-8514-03639d833a55-kube-api-access-rlwnl\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.984978 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/b6f2e96b-baaf-4e76-8514-03639d833a55-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.989765 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.990877 4703 scope.go:117] "RemoveContainer" containerID="236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0" Feb 02 13:12:03 crc kubenswrapper[4703]: I0202 13:12:03.994819 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-1"] Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.005086 4703 scope.go:117] "RemoveContainer" containerID="2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466" Feb 02 13:12:04 crc kubenswrapper[4703]: E0202 13:12:04.005553 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466\": container with ID starting with 2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466 not found: ID does not exist" containerID="2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.005588 4703 pod_container_deletor.go:53] "DeleteContainer returned error" 
containerID={"Type":"cri-o","ID":"2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466"} err="failed to get container status \"2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466\": rpc error: code = NotFound desc = could not find container \"2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466\": container with ID starting with 2f395b50c281725a5be454c40040efe21e61f2215864c56337d43cc67f316466 not found: ID does not exist" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.005609 4703 scope.go:117] "RemoveContainer" containerID="236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0" Feb 02 13:12:04 crc kubenswrapper[4703]: E0202 13:12:04.005945 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0\": container with ID starting with 236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0 not found: ID does not exist" containerID="236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.005961 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0"} err="failed to get container status \"236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0\": rpc error: code = NotFound desc = could not find container \"236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0\": container with ID starting with 236453e8998cb925d425e60f69978ba17e21bb78ae9f180ed079185c9dae59c0 not found: ID does not exist" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.220139 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327213 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kblmq\" (UniqueName: \"kubernetes.io/projected/a3dfad47-2e05-44c2-a196-88fd067111f0-kube-api-access-kblmq\") pod \"a3dfad47-2e05-44c2-a196-88fd067111f0\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327316 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dfad47-2e05-44c2-a196-88fd067111f0-logs\") pod \"a3dfad47-2e05-44c2-a196-88fd067111f0\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327347 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3dfad47-2e05-44c2-a196-88fd067111f0-etc-machine-id\") pod \"a3dfad47-2e05-44c2-a196-88fd067111f0\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327401 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data-custom\") pod \"a3dfad47-2e05-44c2-a196-88fd067111f0\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327423 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data\") pod \"a3dfad47-2e05-44c2-a196-88fd067111f0\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327466 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-scripts\") pod \"a3dfad47-2e05-44c2-a196-88fd067111f0\" (UID: \"a3dfad47-2e05-44c2-a196-88fd067111f0\") " Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327575 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/a3dfad47-2e05-44c2-a196-88fd067111f0-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "a3dfad47-2e05-44c2-a196-88fd067111f0" (UID: "a3dfad47-2e05-44c2-a196-88fd067111f0"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.327780 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/a3dfad47-2e05-44c2-a196-88fd067111f0-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.328219 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a3dfad47-2e05-44c2-a196-88fd067111f0-logs" (OuterVolumeSpecName: "logs") pod "a3dfad47-2e05-44c2-a196-88fd067111f0" (UID: "a3dfad47-2e05-44c2-a196-88fd067111f0"). InnerVolumeSpecName "logs". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.331337 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-scripts" (OuterVolumeSpecName: "scripts") pod "a3dfad47-2e05-44c2-a196-88fd067111f0" (UID: "a3dfad47-2e05-44c2-a196-88fd067111f0"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.331790 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a3dfad47-2e05-44c2-a196-88fd067111f0-kube-api-access-kblmq" (OuterVolumeSpecName: "kube-api-access-kblmq") pod "a3dfad47-2e05-44c2-a196-88fd067111f0" (UID: "a3dfad47-2e05-44c2-a196-88fd067111f0"). InnerVolumeSpecName "kube-api-access-kblmq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.332130 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "a3dfad47-2e05-44c2-a196-88fd067111f0" (UID: "a3dfad47-2e05-44c2-a196-88fd067111f0"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.364311 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data" (OuterVolumeSpecName: "config-data") pod "a3dfad47-2e05-44c2-a196-88fd067111f0" (UID: "a3dfad47-2e05-44c2-a196-88fd067111f0"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.429100 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.429133 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.429156 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/a3dfad47-2e05-44c2-a196-88fd067111f0-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.429166 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kblmq\" (UniqueName: \"kubernetes.io/projected/a3dfad47-2e05-44c2-a196-88fd067111f0-kube-api-access-kblmq\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.429176 4703 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/a3dfad47-2e05-44c2-a196-88fd067111f0-logs\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.982762 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-2" event={"ID":"a3dfad47-2e05-44c2-a196-88fd067111f0","Type":"ContainerDied","Data":"aa1f65c6b0020c964771284f24f5f5b7717e81e85f00a0de3513e7aec023ad84"} Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.982816 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-2" Feb 02 13:12:04 crc kubenswrapper[4703]: I0202 13:12:04.983107 4703 scope.go:117] "RemoveContainer" containerID="6ef6a76ecb386c4b66bd44df569d836031e562da1be69dc88621f94c722b763c" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.028711 4703 scope.go:117] "RemoveContainer" containerID="6a35b38dcbff9340b8967759f30e0ac64c5781dd651a9532c3b022630de1a4c3" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.034265 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.042524 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-2"] Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.467120 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Feb 02 13:12:05 crc kubenswrapper[4703]: E0202 13:12:05.467531 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api-log" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.467552 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api-log" Feb 02 13:12:05 crc kubenswrapper[4703]: E0202 13:12:05.467587 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.467601 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api" Feb 02 13:12:05 crc kubenswrapper[4703]: E0202 13:12:05.467621 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api-log" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.467633 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api-log" Feb 02 13:12:05 crc kubenswrapper[4703]: E0202 13:12:05.467664 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.467677 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.468115 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.468154 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" containerName="manila-api-log" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.468198 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.468224 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" containerName="manila-api-log" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.469819 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.482753 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.543174 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-scripts\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.543429 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-22djh\" (UniqueName: \"kubernetes.io/projected/2fdf0ff6-9404-43a6-89fa-c86902792344-kube-api-access-22djh\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.543495 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data-custom\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.543555 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.543666 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2fdf0ff6-9404-43a6-89fa-c86902792344-etc-machine-id\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.645394 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-22djh\" (UniqueName: \"kubernetes.io/projected/2fdf0ff6-9404-43a6-89fa-c86902792344-kube-api-access-22djh\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.645464 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data-custom\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.645498 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.645528 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: 
\"kubernetes.io/host-path/2fdf0ff6-9404-43a6-89fa-c86902792344-etc-machine-id\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.645596 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-scripts\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.646107 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2fdf0ff6-9404-43a6-89fa-c86902792344-etc-machine-id\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.650928 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.651403 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-scripts\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.651448 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data-custom\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.660761 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-22djh\" (UniqueName: \"kubernetes.io/projected/2fdf0ff6-9404-43a6-89fa-c86902792344-kube-api-access-22djh\") pod \"manila-scheduler-1\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.801100 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.952539 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a3dfad47-2e05-44c2-a196-88fd067111f0" path="/var/lib/kubelet/pods/a3dfad47-2e05-44c2-a196-88fd067111f0/volumes" Feb 02 13:12:05 crc kubenswrapper[4703]: I0202 13:12:05.953748 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b6f2e96b-baaf-4e76-8514-03639d833a55" path="/var/lib/kubelet/pods/b6f2e96b-baaf-4e76-8514-03639d833a55/volumes" Feb 02 13:12:06 crc kubenswrapper[4703]: I0202 13:12:06.073381 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Feb 02 13:12:07 crc kubenswrapper[4703]: I0202 13:12:07.002249 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"2fdf0ff6-9404-43a6-89fa-c86902792344","Type":"ContainerStarted","Data":"ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af"} Feb 02 13:12:07 crc kubenswrapper[4703]: I0202 13:12:07.002726 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"2fdf0ff6-9404-43a6-89fa-c86902792344","Type":"ContainerStarted","Data":"c9a7fb237d74eb8b65d7207275b99885b584eb56e4e2c4a35c91956fe71316bf"} Feb 02 13:12:08 crc kubenswrapper[4703]: I0202 13:12:08.011146 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"2fdf0ff6-9404-43a6-89fa-c86902792344","Type":"ContainerStarted","Data":"37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1"} Feb 02 13:12:08 crc kubenswrapper[4703]: I0202 13:12:08.042643 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-1" podStartSLOduration=3.042616371 podStartE2EDuration="3.042616371s" podCreationTimestamp="2026-02-02 13:12:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:12:08.035360167 +0000 UTC m=+1255.050567721" watchObservedRunningTime="2026-02-02 13:12:08.042616371 +0000 UTC m=+1255.057823945" Feb 02 13:12:15 crc kubenswrapper[4703]: I0202 13:12:15.802348 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:15 crc kubenswrapper[4703]: I0202 13:12:15.984816 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:12:15 crc kubenswrapper[4703]: I0202 13:12:15.984911 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:12:15 crc kubenswrapper[4703]: I0202 13:12:15.984965 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 13:12:15 crc kubenswrapper[4703]: I0202 13:12:15.985782 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" 
containerStatusID={"Type":"cri-o","ID":"75cfda81cf632883297446295adc654e01a62e4b398c4d8ee1de01c8cbb3f5e2"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 13:12:15 crc kubenswrapper[4703]: I0202 13:12:15.985875 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://75cfda81cf632883297446295adc654e01a62e4b398c4d8ee1de01c8cbb3f5e2" gracePeriod=600 Feb 02 13:12:17 crc kubenswrapper[4703]: I0202 13:12:17.099575 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="75cfda81cf632883297446295adc654e01a62e4b398c4d8ee1de01c8cbb3f5e2" exitCode=0 Feb 02 13:12:17 crc kubenswrapper[4703]: I0202 13:12:17.099671 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"75cfda81cf632883297446295adc654e01a62e4b398c4d8ee1de01c8cbb3f5e2"} Feb 02 13:12:17 crc kubenswrapper[4703]: I0202 13:12:17.101631 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"2d3f73bdd3e90d2ab94249da72d0912b4673b933b3dc1372b3fe24922995e76a"} Feb 02 13:12:17 crc kubenswrapper[4703]: I0202 13:12:17.101684 4703 scope.go:117] "RemoveContainer" containerID="f45a976ef2978692f05e47b584c70f23b0b16afa947c9943af0093366493b355" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.286735 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.360561 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.361825 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.378377 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.445650 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.445777 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/877fc2d2-b258-442f-b992-d4b2fc8e9a09-etc-machine-id\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.445822 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-scripts\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.445854 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data-custom\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.445870 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rpm66\" (UniqueName: \"kubernetes.io/projected/877fc2d2-b258-442f-b992-d4b2fc8e9a09-kube-api-access-rpm66\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.546701 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.546791 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/877fc2d2-b258-442f-b992-d4b2fc8e9a09-etc-machine-id\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.546833 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-scripts\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.546875 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data-custom\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.546875 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/877fc2d2-b258-442f-b992-d4b2fc8e9a09-etc-machine-id\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.546896 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rpm66\" (UniqueName: \"kubernetes.io/projected/877fc2d2-b258-442f-b992-d4b2fc8e9a09-kube-api-access-rpm66\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.554825 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data-custom\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.555052 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-scripts\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.561158 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.564930 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rpm66\" (UniqueName: \"kubernetes.io/projected/877fc2d2-b258-442f-b992-d4b2fc8e9a09-kube-api-access-rpm66\") pod \"manila-scheduler-2\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.682305 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:27 crc kubenswrapper[4703]: I0202 13:12:27.904446 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Feb 02 13:12:28 crc kubenswrapper[4703]: I0202 13:12:28.183994 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"877fc2d2-b258-442f-b992-d4b2fc8e9a09","Type":"ContainerStarted","Data":"53c9d32cf646783f63cda814fe10267738d6086565e7b1c6ee5e2ac34b437b3d"} Feb 02 13:12:29 crc kubenswrapper[4703]: I0202 13:12:29.193083 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"877fc2d2-b258-442f-b992-d4b2fc8e9a09","Type":"ContainerStarted","Data":"5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9"} Feb 02 13:12:29 crc kubenswrapper[4703]: I0202 13:12:29.194354 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"877fc2d2-b258-442f-b992-d4b2fc8e9a09","Type":"ContainerStarted","Data":"6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986"} Feb 02 13:12:29 crc kubenswrapper[4703]: I0202 13:12:29.208005 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-2" podStartSLOduration=2.207989006 podStartE2EDuration="2.207989006s" podCreationTimestamp="2026-02-02 13:12:27 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:12:29.207117591 +0000 UTC m=+1276.222325125" watchObservedRunningTime="2026-02-02 13:12:29.207989006 +0000 UTC m=+1276.223196540" Feb 02 13:12:37 crc kubenswrapper[4703]: I0202 13:12:37.682593 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:49 crc kubenswrapper[4703]: I0202 13:12:49.208843 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.550520 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-sync-qkvl5"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.561849 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-sync-qkvl5"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.568634 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.568967 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-1" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="manila-scheduler" containerID="cri-o://ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.569020 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-1" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="probe" containerID="cri-o://37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.578746 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.579036 4703 
kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="manila-scheduler" containerID="cri-o://80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.579115 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="probe" containerID="cri-o://768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.590595 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.590846 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-2" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="manila-scheduler" containerID="cri-o://6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.590894 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-2" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="probe" containerID="cri-o://5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.611143 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.611486 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="manila-share" containerID="cri-o://ea0bcdeb2fef6364f147105cf4f3aaaef7eb70da891e4a9809aa3f73a1e5ee1d" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.611590 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="probe" containerID="cri-o://567ded5ea4f19c852794586504b183e6a837fbf9b62a5ec6e12a3b16689fc8a1" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.626122 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila9697-account-delete-zk8sh"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.627266 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.648921 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila9697-account-delete-zk8sh"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.658497 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.658766 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api-log" containerID="cri-o://f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.658799 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api" containerID="cri-o://bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f" gracePeriod=30 Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.821324 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-operator-scripts\") pod \"manila9697-account-delete-zk8sh\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.821691 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-sglsn\" (UniqueName: \"kubernetes.io/projected/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-kube-api-access-sglsn\") pod \"manila9697-account-delete-zk8sh\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.922608 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-sglsn\" (UniqueName: \"kubernetes.io/projected/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-kube-api-access-sglsn\") pod \"manila9697-account-delete-zk8sh\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.922723 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-operator-scripts\") pod \"manila9697-account-delete-zk8sh\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.923535 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-operator-scripts\") pod \"manila9697-account-delete-zk8sh\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.951962 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-sglsn\" (UniqueName: \"kubernetes.io/projected/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-kube-api-access-sglsn\") pod \"manila9697-account-delete-zk8sh\" (UID: 
\"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:50 crc kubenswrapper[4703]: I0202 13:12:50.953664 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.215892 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila9697-account-delete-zk8sh"] Feb 02 13:12:51 crc kubenswrapper[4703]: W0202 13:12:51.227738 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod86c3f982_e5d6_4dcc_8d0a_8e53eeb566bb.slice/crio-3f4a179a39b25b407c255294b4fec9b1f537ea8dd6e8fa0813c159b6b6aa862a WatchSource:0}: Error finding container 3f4a179a39b25b407c255294b4fec9b1f537ea8dd6e8fa0813c159b6b6aa862a: Status 404 returned error can't find the container with id 3f4a179a39b25b407c255294b4fec9b1f537ea8dd6e8fa0813c159b6b6aa862a Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.359100 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" event={"ID":"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb","Type":"ContainerStarted","Data":"5253613ef8830ff6e4c45f7fc6033227ae263832b3cc56b5b7de0cef105fb946"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.359144 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" event={"ID":"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb","Type":"ContainerStarted","Data":"3f4a179a39b25b407c255294b4fec9b1f537ea8dd6e8fa0813c159b6b6aa862a"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.363202 4703 generic.go:334] "Generic (PLEG): container finished" podID="216a3968-f8c5-466f-bbd3-22447464e608" containerID="567ded5ea4f19c852794586504b183e6a837fbf9b62a5ec6e12a3b16689fc8a1" exitCode=0 Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.363229 4703 generic.go:334] "Generic (PLEG): container finished" podID="216a3968-f8c5-466f-bbd3-22447464e608" containerID="ea0bcdeb2fef6364f147105cf4f3aaaef7eb70da891e4a9809aa3f73a1e5ee1d" exitCode=1 Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.363295 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"216a3968-f8c5-466f-bbd3-22447464e608","Type":"ContainerDied","Data":"567ded5ea4f19c852794586504b183e6a837fbf9b62a5ec6e12a3b16689fc8a1"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.363319 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"216a3968-f8c5-466f-bbd3-22447464e608","Type":"ContainerDied","Data":"ea0bcdeb2fef6364f147105cf4f3aaaef7eb70da891e4a9809aa3f73a1e5ee1d"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.377154 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" podStartSLOduration=1.377131273 podStartE2EDuration="1.377131273s" podCreationTimestamp="2026-02-02 13:12:50 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:12:51.375097786 +0000 UTC m=+1298.390305330" watchObservedRunningTime="2026-02-02 13:12:51.377131273 +0000 UTC m=+1298.392338807" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.380548 4703 generic.go:334] "Generic (PLEG): container finished" 
podID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerID="5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9" exitCode=0 Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.380650 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"877fc2d2-b258-442f-b992-d4b2fc8e9a09","Type":"ContainerDied","Data":"5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.383944 4703 generic.go:334] "Generic (PLEG): container finished" podID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerID="37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1" exitCode=0 Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.384011 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"2fdf0ff6-9404-43a6-89fa-c86902792344","Type":"ContainerDied","Data":"37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.386688 4703 generic.go:334] "Generic (PLEG): container finished" podID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerID="768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718" exitCode=0 Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.386763 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"3d46d7ce-93f4-4247-9d5d-800618bfd04f","Type":"ContainerDied","Data":"768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.388892 4703 generic.go:334] "Generic (PLEG): container finished" podID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerID="f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74" exitCode=143 Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.388930 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"67e0fa34-87a7-481c-bd0e-a281ae56e369","Type":"ContainerDied","Data":"f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74"} Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.482199 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.632843 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-var-lib-manila\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.632895 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-etc-machine-id\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.632921 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-scripts\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.632978 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-ceph\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.632999 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.633036 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wkjpj\" (UniqueName: \"kubernetes.io/projected/216a3968-f8c5-466f-bbd3-22447464e608-kube-api-access-wkjpj\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.633087 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.633150 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data-custom\") pod \"216a3968-f8c5-466f-bbd3-22447464e608\" (UID: \"216a3968-f8c5-466f-bbd3-22447464e608\") " Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.633587 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.634028 4703 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-var-lib-manila\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.638183 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.638303 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/216a3968-f8c5-466f-bbd3-22447464e608-kube-api-access-wkjpj" (OuterVolumeSpecName: "kube-api-access-wkjpj") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "kube-api-access-wkjpj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.638396 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-ceph" (OuterVolumeSpecName: "ceph") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.638416 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-scripts" (OuterVolumeSpecName: "scripts") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.695702 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data" (OuterVolumeSpecName: "config-data") pod "216a3968-f8c5-466f-bbd3-22447464e608" (UID: "216a3968-f8c5-466f-bbd3-22447464e608"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.737587 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.737617 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/216a3968-f8c5-466f-bbd3-22447464e608-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.737626 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.737637 4703 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.737646 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wkjpj\" (UniqueName: \"kubernetes.io/projected/216a3968-f8c5-466f-bbd3-22447464e608-kube-api-access-wkjpj\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.737656 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/216a3968-f8c5-466f-bbd3-22447464e608-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:51 crc kubenswrapper[4703]: I0202 13:12:51.946960 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92c3b050-25a6-43c6-ac08-1511284f2b96" path="/var/lib/kubelet/pods/92c3b050-25a6-43c6-ac08-1511284f2b96/volumes" Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.396825 4703 generic.go:334] "Generic (PLEG): container finished" podID="86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" containerID="5253613ef8830ff6e4c45f7fc6033227ae263832b3cc56b5b7de0cef105fb946" exitCode=0 Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.396869 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" event={"ID":"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb","Type":"ContainerDied","Data":"5253613ef8830ff6e4c45f7fc6033227ae263832b3cc56b5b7de0cef105fb946"} Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.399702 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"216a3968-f8c5-466f-bbd3-22447464e608","Type":"ContainerDied","Data":"987fa0fda2d45a4f28f112ec707cce84c3faf9da0a2548370018b41538d46b97"} Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.399723 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.399747 4703 scope.go:117] "RemoveContainer" containerID="567ded5ea4f19c852794586504b183e6a837fbf9b62a5ec6e12a3b16689fc8a1" Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.422803 4703 scope.go:117] "RemoveContainer" containerID="ea0bcdeb2fef6364f147105cf4f3aaaef7eb70da891e4a9809aa3f73a1e5ee1d" Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.438486 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:12:52 crc kubenswrapper[4703]: I0202 13:12:52.443135 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.852385 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.952692 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="216a3968-f8c5-466f-bbd3-22447464e608" path="/var/lib/kubelet/pods/216a3968-f8c5-466f-bbd3-22447464e608/volumes" Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.961627 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.967634 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-operator-scripts\") pod \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.967692 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-sglsn\" (UniqueName: \"kubernetes.io/projected/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-kube-api-access-sglsn\") pod \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\" (UID: \"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb\") " Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.969075 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" (UID: "86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.979536 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-kube-api-access-sglsn" (OuterVolumeSpecName: "kube-api-access-sglsn") pod "86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" (UID: "86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb"). InnerVolumeSpecName "kube-api-access-sglsn". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:53 crc kubenswrapper[4703]: I0202 13:12:53.987958 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069439 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-pfs2n\" (UniqueName: \"kubernetes.io/projected/3d46d7ce-93f4-4247-9d5d-800618bfd04f-kube-api-access-pfs2n\") pod \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069523 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d46d7ce-93f4-4247-9d5d-800618bfd04f-etc-machine-id\") pod \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069599 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-scripts\") pod \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069631 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data-custom\") pod \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069659 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data\") pod \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\" (UID: \"3d46d7ce-93f4-4247-9d5d-800618bfd04f\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069945 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.069958 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-sglsn\" (UniqueName: \"kubernetes.io/projected/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb-kube-api-access-sglsn\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.070552 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/3d46d7ce-93f4-4247-9d5d-800618bfd04f-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "3d46d7ce-93f4-4247-9d5d-800618bfd04f" (UID: "3d46d7ce-93f4-4247-9d5d-800618bfd04f"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.072665 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3d46d7ce-93f4-4247-9d5d-800618bfd04f-kube-api-access-pfs2n" (OuterVolumeSpecName: "kube-api-access-pfs2n") pod "3d46d7ce-93f4-4247-9d5d-800618bfd04f" (UID: "3d46d7ce-93f4-4247-9d5d-800618bfd04f"). InnerVolumeSpecName "kube-api-access-pfs2n". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.073732 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-scripts" (OuterVolumeSpecName: "scripts") pod "3d46d7ce-93f4-4247-9d5d-800618bfd04f" (UID: "3d46d7ce-93f4-4247-9d5d-800618bfd04f"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.075158 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "3d46d7ce-93f4-4247-9d5d-800618bfd04f" (UID: "3d46d7ce-93f4-4247-9d5d-800618bfd04f"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.134597 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data" (OuterVolumeSpecName: "config-data") pod "3d46d7ce-93f4-4247-9d5d-800618bfd04f" (UID: "3d46d7ce-93f4-4247-9d5d-800618bfd04f"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.172542 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data-custom\") pod \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.172663 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-scripts\") pod \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.172706 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/877fc2d2-b258-442f-b992-d4b2fc8e9a09-etc-machine-id\") pod \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.173907 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data\") pod \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.173988 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rpm66\" (UniqueName: \"kubernetes.io/projected/877fc2d2-b258-442f-b992-d4b2fc8e9a09-kube-api-access-rpm66\") pod \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\" (UID: \"877fc2d2-b258-442f-b992-d4b2fc8e9a09\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174136 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/877fc2d2-b258-442f-b992-d4b2fc8e9a09-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "877fc2d2-b258-442f-b992-d4b2fc8e9a09" (UID: "877fc2d2-b258-442f-b992-d4b2fc8e9a09"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174398 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-pfs2n\" (UniqueName: \"kubernetes.io/projected/3d46d7ce-93f4-4247-9d5d-800618bfd04f-kube-api-access-pfs2n\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174417 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/3d46d7ce-93f4-4247-9d5d-800618bfd04f-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174435 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174447 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174458 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/877fc2d2-b258-442f-b992-d4b2fc8e9a09-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.174469 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/3d46d7ce-93f4-4247-9d5d-800618bfd04f-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.177492 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "877fc2d2-b258-442f-b992-d4b2fc8e9a09" (UID: "877fc2d2-b258-442f-b992-d4b2fc8e9a09"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.182496 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-scripts" (OuterVolumeSpecName: "scripts") pod "877fc2d2-b258-442f-b992-d4b2fc8e9a09" (UID: "877fc2d2-b258-442f-b992-d4b2fc8e9a09"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.182648 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/877fc2d2-b258-442f-b992-d4b2fc8e9a09-kube-api-access-rpm66" (OuterVolumeSpecName: "kube-api-access-rpm66") pod "877fc2d2-b258-442f-b992-d4b2fc8e9a09" (UID: "877fc2d2-b258-442f-b992-d4b2fc8e9a09"). InnerVolumeSpecName "kube-api-access-rpm66". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.189869 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.247309 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data" (OuterVolumeSpecName: "config-data") pod "877fc2d2-b258-442f-b992-d4b2fc8e9a09" (UID: "877fc2d2-b258-442f-b992-d4b2fc8e9a09"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.275966 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.276020 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.276034 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rpm66\" (UniqueName: \"kubernetes.io/projected/877fc2d2-b258-442f-b992-d4b2fc8e9a09-kube-api-access-rpm66\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.276044 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/877fc2d2-b258-442f-b992-d4b2fc8e9a09-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.313016 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.377814 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data\") pod \"67e0fa34-87a7-481c-bd0e-a281ae56e369\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.377869 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-22djh\" (UniqueName: \"kubernetes.io/projected/2fdf0ff6-9404-43a6-89fa-c86902792344-kube-api-access-22djh\") pod \"2fdf0ff6-9404-43a6-89fa-c86902792344\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.377896 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2fdf0ff6-9404-43a6-89fa-c86902792344-etc-machine-id\") pod \"2fdf0ff6-9404-43a6-89fa-c86902792344\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.377943 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-scripts\") pod \"67e0fa34-87a7-481c-bd0e-a281ae56e369\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.377981 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-scripts\") pod \"2fdf0ff6-9404-43a6-89fa-c86902792344\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.378009 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data\") pod \"2fdf0ff6-9404-43a6-89fa-c86902792344\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.378059 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" 
(UniqueName: \"kubernetes.io/host-path/67e0fa34-87a7-481c-bd0e-a281ae56e369-etc-machine-id\") pod \"67e0fa34-87a7-481c-bd0e-a281ae56e369\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.378112 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom\") pod \"67e0fa34-87a7-481c-bd0e-a281ae56e369\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.378148 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c4mpd\" (UniqueName: \"kubernetes.io/projected/67e0fa34-87a7-481c-bd0e-a281ae56e369-kube-api-access-c4mpd\") pod \"67e0fa34-87a7-481c-bd0e-a281ae56e369\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.378172 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e0fa34-87a7-481c-bd0e-a281ae56e369-logs\") pod \"67e0fa34-87a7-481c-bd0e-a281ae56e369\" (UID: \"67e0fa34-87a7-481c-bd0e-a281ae56e369\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.378194 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data-custom\") pod \"2fdf0ff6-9404-43a6-89fa-c86902792344\" (UID: \"2fdf0ff6-9404-43a6-89fa-c86902792344\") " Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.381890 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2fdf0ff6-9404-43a6-89fa-c86902792344" (UID: "2fdf0ff6-9404-43a6-89fa-c86902792344"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.383337 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "67e0fa34-87a7-481c-bd0e-a281ae56e369" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.383404 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/67e0fa34-87a7-481c-bd0e-a281ae56e369-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "67e0fa34-87a7-481c-bd0e-a281ae56e369" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.383439 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2fdf0ff6-9404-43a6-89fa-c86902792344-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2fdf0ff6-9404-43a6-89fa-c86902792344" (UID: "2fdf0ff6-9404-43a6-89fa-c86902792344"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.383893 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/67e0fa34-87a7-481c-bd0e-a281ae56e369-logs" (OuterVolumeSpecName: "logs") pod "67e0fa34-87a7-481c-bd0e-a281ae56e369" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.384672 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/67e0fa34-87a7-481c-bd0e-a281ae56e369-kube-api-access-c4mpd" (OuterVolumeSpecName: "kube-api-access-c4mpd") pod "67e0fa34-87a7-481c-bd0e-a281ae56e369" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369"). InnerVolumeSpecName "kube-api-access-c4mpd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.386067 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2fdf0ff6-9404-43a6-89fa-c86902792344-kube-api-access-22djh" (OuterVolumeSpecName: "kube-api-access-22djh") pod "2fdf0ff6-9404-43a6-89fa-c86902792344" (UID: "2fdf0ff6-9404-43a6-89fa-c86902792344"). InnerVolumeSpecName "kube-api-access-22djh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.386693 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-scripts" (OuterVolumeSpecName: "scripts") pod "67e0fa34-87a7-481c-bd0e-a281ae56e369" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.387970 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-scripts" (OuterVolumeSpecName: "scripts") pod "2fdf0ff6-9404-43a6-89fa-c86902792344" (UID: "2fdf0ff6-9404-43a6-89fa-c86902792344"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.420139 4703 generic.go:334] "Generic (PLEG): container finished" podID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerID="bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f" exitCode=0 Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.420202 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"67e0fa34-87a7-481c-bd0e-a281ae56e369","Type":"ContainerDied","Data":"bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.420232 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"67e0fa34-87a7-481c-bd0e-a281ae56e369","Type":"ContainerDied","Data":"c9ded08cb8967a93f1a6fb77498b99ef275e62952d82d916484ecdaad95e93e3"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.420252 4703 scope.go:117] "RemoveContainer" containerID="bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.420453 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.425595 4703 generic.go:334] "Generic (PLEG): container finished" podID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerID="6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986" exitCode=0 Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.425827 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"877fc2d2-b258-442f-b992-d4b2fc8e9a09","Type":"ContainerDied","Data":"6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.425878 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-2" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.425889 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-2" event={"ID":"877fc2d2-b258-442f-b992-d4b2fc8e9a09","Type":"ContainerDied","Data":"53c9d32cf646783f63cda814fe10267738d6086565e7b1c6ee5e2ac34b437b3d"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.428675 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data" (OuterVolumeSpecName: "config-data") pod "67e0fa34-87a7-481c-bd0e-a281ae56e369" (UID: "67e0fa34-87a7-481c-bd0e-a281ae56e369"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.429716 4703 generic.go:334] "Generic (PLEG): container finished" podID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerID="ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af" exitCode=0 Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.429820 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-1" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.430125 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"2fdf0ff6-9404-43a6-89fa-c86902792344","Type":"ContainerDied","Data":"ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.430163 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-1" event={"ID":"2fdf0ff6-9404-43a6-89fa-c86902792344","Type":"ContainerDied","Data":"c9a7fb237d74eb8b65d7207275b99885b584eb56e4e2c4a35c91956fe71316bf"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.435396 4703 generic.go:334] "Generic (PLEG): container finished" podID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerID="80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d" exitCode=0 Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.435453 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"3d46d7ce-93f4-4247-9d5d-800618bfd04f","Type":"ContainerDied","Data":"80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.435474 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"3d46d7ce-93f4-4247-9d5d-800618bfd04f","Type":"ContainerDied","Data":"27326a00b6fee502023de7b1cf616af859185343a2d20a21b812503481294b00"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.435528 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.439819 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" event={"ID":"86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb","Type":"ContainerDied","Data":"3f4a179a39b25b407c255294b4fec9b1f537ea8dd6e8fa0813c159b6b6aa862a"} Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.439856 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="3f4a179a39b25b407c255294b4fec9b1f537ea8dd6e8fa0813c159b6b6aa862a" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.439903 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila9697-account-delete-zk8sh" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.448137 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data" (OuterVolumeSpecName: "config-data") pod "2fdf0ff6-9404-43a6-89fa-c86902792344" (UID: "2fdf0ff6-9404-43a6-89fa-c86902792344"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.451376 4703 scope.go:117] "RemoveContainer" containerID="f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.462248 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.468751 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-2"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.472507 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.477379 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479394 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479434 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/67e0fa34-87a7-481c-bd0e-a281ae56e369-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479444 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479454 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c4mpd\" (UniqueName: \"kubernetes.io/projected/67e0fa34-87a7-481c-bd0e-a281ae56e369-kube-api-access-c4mpd\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479464 4703 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/67e0fa34-87a7-481c-bd0e-a281ae56e369-logs\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479474 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2fdf0ff6-9404-43a6-89fa-c86902792344-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479484 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479492 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-22djh\" (UniqueName: \"kubernetes.io/projected/2fdf0ff6-9404-43a6-89fa-c86902792344-kube-api-access-22djh\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479500 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2fdf0ff6-9404-43a6-89fa-c86902792344-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.479508 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/67e0fa34-87a7-481c-bd0e-a281ae56e369-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.482637 4703 scope.go:117] "RemoveContainer" containerID="bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.483102 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f\": container with ID starting with bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f not found: ID does not exist" containerID="bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.483143 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f"} err="failed to get container status \"bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f\": rpc error: code = NotFound desc = could not find container \"bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f\": container with ID starting with bd55b6a92e00f594517d4eaeff413c42336ff1892c30f4b5fc4b9ff038c3443f not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.483170 4703 scope.go:117] "RemoveContainer" containerID="f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.483619 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74\": container with ID starting with f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74 not found: ID does not exist" containerID="f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.483652 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74"} err="failed to get container status \"f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74\": rpc error: code = NotFound desc = could not find container \"f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74\": container with ID starting with f7058c55ee1199419d27b390cb58721e4cfa1ae626d59daa78a6877bc9d3fe74 not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.483676 4703 scope.go:117] "RemoveContainer" containerID="5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.499939 4703 scope.go:117] "RemoveContainer" containerID="6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.516235 4703 scope.go:117] "RemoveContainer" containerID="5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.516625 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9\": container with ID starting with 5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9 not found: ID does not exist" containerID="5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.516700 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9"} err="failed to get container status \"5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9\": rpc error: code = NotFound desc = could not find container \"5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9\": container with ID starting with 5d1215386e0b70874440ca8d404f0df9f198bcaf0bfe739cef03c893b20aeeb9 not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.516729 4703 scope.go:117] "RemoveContainer" containerID="6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.517113 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986\": container with ID starting with 6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986 not found: ID does not exist" containerID="6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.517143 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986"} err="failed to get container status \"6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986\": rpc error: code = NotFound desc = could not find container \"6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986\": container with ID starting with 6fc8b910efa58bcd864bbe46c5a6e1e5f45878116387ea6339344bb05a2be986 not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.517165 4703 scope.go:117] "RemoveContainer" containerID="37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.533558 4703 scope.go:117] "RemoveContainer" containerID="ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.552802 4703 scope.go:117] "RemoveContainer" containerID="37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.553572 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1\": container with ID starting with 37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1 not found: ID does not exist" containerID="37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.553704 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1"} err="failed to get container status \"37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1\": rpc error: code = NotFound desc = could not find container \"37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1\": container with ID starting with 37843c39442b491e60d10e05f375d4da624f4bbf71b2c8e52c7fa2d7b32dafb1 not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.553841 4703 scope.go:117] "RemoveContainer" containerID="ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.554589 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af\": container with ID starting with ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af not found: ID does not exist" containerID="ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.554794 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af"} err="failed to get container status \"ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af\": rpc error: code = NotFound desc = could not find container \"ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af\": container with ID starting with ed646df9f6e5e7474919ed8f3143db42ba3be10d1459a10babf288458a7bd0af not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.554876 4703 scope.go:117] "RemoveContainer" containerID="768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.573885 4703 scope.go:117] "RemoveContainer" containerID="80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.592393 4703 scope.go:117] "RemoveContainer" containerID="768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.592937 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718\": container with ID starting with 768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718 not found: ID does not exist" containerID="768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.592995 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718"} err="failed to get container status \"768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718\": rpc error: code = NotFound desc = could not find container \"768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718\": container with ID starting with 768c116c78636b94bc25dc5f1b33795113972a5848fda6496fc3bf2b9158b718 not found: ID does not exist"
Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.593030 4703 scope.go:117] "RemoveContainer" containerID="80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d"
Feb 02 13:12:54 crc kubenswrapper[4703]: E0202 13:12:54.593357 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d\": container with ID starting with 80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d not found: ID does not exist" containerID="80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d"
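[Annotation: the alternating E0202/I0202 block above is a benign pattern: scope.go:117 asks the runtime to remove containers CRI-O has already deleted, so the follow-up status lookup returns NotFound and pod_container_deletor.go:53 merely records it. A sanity check sketched on that assumption (hypothetical helper, not kubelet code), confirming every NotFound names an ID previously submitted for removal:]

    def benign_notfound(lines):
        removed, suspicious = set(), []
        for line in lines:
            if '"RemoveContainer" containerID="' in line:
                removed.add(line.rsplit('containerID="', 1)[1].split('"')[0])
            elif 'code = NotFound' in line and 'containerID="' in line:
                cid = line.rsplit('containerID="', 1)[1].split('"')[0]
                if cid not in removed:
                    suspicious.append(cid)
        return suspicious  # empty => every NotFound was a repeat deletion

[Run over this section it returns an empty list: all eight IDs that hit NotFound (bd55b6…, f7058c…, 5d1215…, 6fc8b9…, 37843c…, ed646d…, 768c11…, 80d010…) appear in an earlier "RemoveContainer" line, so nothing here indicates a real runtime fault.]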
containerID="80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.593381 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d"} err="failed to get container status \"80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d\": rpc error: code = NotFound desc = could not find container \"80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d\": container with ID starting with 80d010fbd689d8d1695e2c5735db482c5aad7a90f6579e2dd16e4904705ec28d not found: ID does not exist" Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.758538 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.764575 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.777133 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Feb 02 13:12:54 crc kubenswrapper[4703]: I0202 13:12:54.782142 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-1"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.654576 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-create-qmnbh"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.665291 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-create-qmnbh"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.676263 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila9697-account-delete-zk8sh"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.686611 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-9697-account-create-update-4snxw"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.695806 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-9697-account-create-update-4snxw"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.700976 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila9697-account-delete-zk8sh"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.738887 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-create-jngfh"] Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739153 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739172 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739192 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739197 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739207 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739213 
4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739221 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739227 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739236 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739241 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739253 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api-log" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739259 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api-log" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739293 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="manila-share" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739305 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="manila-share" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739314 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739321 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739332 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739339 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739350 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" containerName="mariadb-account-delete" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739355 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" containerName="mariadb-account-delete" Feb 02 13:12:55 crc kubenswrapper[4703]: E0202 13:12:55.739363 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.739368 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741468 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741490 4703 memory_manager.go:354] 
"RemoveStaleState removing state" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741501 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741513 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741525 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" containerName="mariadb-account-delete" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741534 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api-log" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741543 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741553 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="manila-scheduler" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741566 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" containerName="probe" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741574 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="216a3968-f8c5-466f-bbd3-22447464e608" containerName="manila-share" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.741591 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" containerName="manila-api" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.742142 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.750580 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-jngfh"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.841633 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-1b16-account-create-update-hcbbx"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.842489 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.844909 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-db-secret" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.847578 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-1b16-account-create-update-hcbbx"] Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.899160 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-operator-scripts\") pod \"manila-db-create-jngfh\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.899220 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-gjdh9\" (UniqueName: \"kubernetes.io/projected/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-kube-api-access-gjdh9\") pod \"manila-db-create-jngfh\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.942306 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="068b7b7f-2185-4928-adc3-99bd78652039" path="/var/lib/kubelet/pods/068b7b7f-2185-4928-adc3-99bd78652039/volumes" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.943034 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2fdf0ff6-9404-43a6-89fa-c86902792344" path="/var/lib/kubelet/pods/2fdf0ff6-9404-43a6-89fa-c86902792344/volumes" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.943699 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3d46d7ce-93f4-4247-9d5d-800618bfd04f" path="/var/lib/kubelet/pods/3d46d7ce-93f4-4247-9d5d-800618bfd04f/volumes" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.947889 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="67e0fa34-87a7-481c-bd0e-a281ae56e369" path="/var/lib/kubelet/pods/67e0fa34-87a7-481c-bd0e-a281ae56e369/volumes" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.948631 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb" path="/var/lib/kubelet/pods/86c3f982-e5d6-4dcc-8d0a-8e53eeb566bb/volumes" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.949583 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="877fc2d2-b258-442f-b992-d4b2fc8e9a09" path="/var/lib/kubelet/pods/877fc2d2-b258-442f-b992-d4b2fc8e9a09/volumes" Feb 02 13:12:55 crc kubenswrapper[4703]: I0202 13:12:55.950210 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c31a25e9-58d5-45c1-b62d-eaf54d4d3766" path="/var/lib/kubelet/pods/c31a25e9-58d5-45c1-b62d-eaf54d4d3766/volumes" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.000235 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lrv54\" (UniqueName: \"kubernetes.io/projected/2984c983-0968-4970-abbd-087609948c41-kube-api-access-lrv54\") pod \"manila-1b16-account-create-update-hcbbx\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.000315 4703 
reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2984c983-0968-4970-abbd-087609948c41-operator-scripts\") pod \"manila-1b16-account-create-update-hcbbx\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.000369 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-operator-scripts\") pod \"manila-db-create-jngfh\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.000791 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-gjdh9\" (UniqueName: \"kubernetes.io/projected/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-kube-api-access-gjdh9\") pod \"manila-db-create-jngfh\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.001445 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-operator-scripts\") pod \"manila-db-create-jngfh\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.024008 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-gjdh9\" (UniqueName: \"kubernetes.io/projected/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-kube-api-access-gjdh9\") pod \"manila-db-create-jngfh\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.102675 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lrv54\" (UniqueName: \"kubernetes.io/projected/2984c983-0968-4970-abbd-087609948c41-kube-api-access-lrv54\") pod \"manila-1b16-account-create-update-hcbbx\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.102757 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2984c983-0968-4970-abbd-087609948c41-operator-scripts\") pod \"manila-1b16-account-create-update-hcbbx\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.103870 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2984c983-0968-4970-abbd-087609948c41-operator-scripts\") pod \"manila-1b16-account-create-update-hcbbx\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.106656 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.118151 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lrv54\" (UniqueName: \"kubernetes.io/projected/2984c983-0968-4970-abbd-087609948c41-kube-api-access-lrv54\") pod \"manila-1b16-account-create-update-hcbbx\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.162972 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.530636 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-jngfh"] Feb 02 13:12:56 crc kubenswrapper[4703]: I0202 13:12:56.641378 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-1b16-account-create-update-hcbbx"] Feb 02 13:12:57 crc kubenswrapper[4703]: I0202 13:12:57.486007 4703 generic.go:334] "Generic (PLEG): container finished" podID="83d8a461-7137-49da-ae6d-1a50ebc3d4f1" containerID="2a6d5fd32f54080c9373342dba8867eb91db2ed2acb08197eebfeeac7dae899d" exitCode=0 Feb 02 13:12:57 crc kubenswrapper[4703]: I0202 13:12:57.486067 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-jngfh" event={"ID":"83d8a461-7137-49da-ae6d-1a50ebc3d4f1","Type":"ContainerDied","Data":"2a6d5fd32f54080c9373342dba8867eb91db2ed2acb08197eebfeeac7dae899d"} Feb 02 13:12:57 crc kubenswrapper[4703]: I0202 13:12:57.486115 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-jngfh" event={"ID":"83d8a461-7137-49da-ae6d-1a50ebc3d4f1","Type":"ContainerStarted","Data":"f6f8ca76cb721d886c6c0ba1d456f3b7d9651dc1cf9cece123b510be61a0f3f8"} Feb 02 13:12:57 crc kubenswrapper[4703]: I0202 13:12:57.487755 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" event={"ID":"2984c983-0968-4970-abbd-087609948c41","Type":"ContainerStarted","Data":"19b0919e9bfb8a13878c1b958f9cf3fe5197f70c3f96fe8350c9dc228898f5ee"} Feb 02 13:12:57 crc kubenswrapper[4703]: I0202 13:12:57.487806 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" event={"ID":"2984c983-0968-4970-abbd-087609948c41","Type":"ContainerStarted","Data":"da25e04d7cc84fec521f469dfc9bdda1523fdd921ca401e933668f2a29e9a609"} Feb 02 13:12:57 crc kubenswrapper[4703]: I0202 13:12:57.516057 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" podStartSLOduration=2.516038984 podStartE2EDuration="2.516038984s" podCreationTimestamp="2026-02-02 13:12:55 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:12:57.513349239 +0000 UTC m=+1304.528556773" watchObservedRunningTime="2026-02-02 13:12:57.516038984 +0000 UTC m=+1304.531246518" Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.496641 4703 generic.go:334] "Generic (PLEG): container finished" podID="2984c983-0968-4970-abbd-087609948c41" containerID="19b0919e9bfb8a13878c1b958f9cf3fe5197f70c3f96fe8350c9dc228898f5ee" exitCode=0 Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.496726 4703 kubelet.go:2453] "SyncLoop 
(PLEG): event for pod" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" event={"ID":"2984c983-0968-4970-abbd-087609948c41","Type":"ContainerDied","Data":"19b0919e9bfb8a13878c1b958f9cf3fe5197f70c3f96fe8350c9dc228898f5ee"} Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.776328 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.862503 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-operator-scripts\") pod \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.862689 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-gjdh9\" (UniqueName: \"kubernetes.io/projected/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-kube-api-access-gjdh9\") pod \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\" (UID: \"83d8a461-7137-49da-ae6d-1a50ebc3d4f1\") " Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.864533 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "83d8a461-7137-49da-ae6d-1a50ebc3d4f1" (UID: "83d8a461-7137-49da-ae6d-1a50ebc3d4f1"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.872984 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-kube-api-access-gjdh9" (OuterVolumeSpecName: "kube-api-access-gjdh9") pod "83d8a461-7137-49da-ae6d-1a50ebc3d4f1" (UID: "83d8a461-7137-49da-ae6d-1a50ebc3d4f1"). InnerVolumeSpecName "kube-api-access-gjdh9". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.963871 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:58 crc kubenswrapper[4703]: I0202 13:12:58.963903 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-gjdh9\" (UniqueName: \"kubernetes.io/projected/83d8a461-7137-49da-ae6d-1a50ebc3d4f1-kube-api-access-gjdh9\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.505879 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-jngfh" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.505872 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-jngfh" event={"ID":"83d8a461-7137-49da-ae6d-1a50ebc3d4f1","Type":"ContainerDied","Data":"f6f8ca76cb721d886c6c0ba1d456f3b7d9651dc1cf9cece123b510be61a0f3f8"} Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.505930 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f6f8ca76cb721d886c6c0ba1d456f3b7d9651dc1cf9cece123b510be61a0f3f8" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.752840 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.780979 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lrv54\" (UniqueName: \"kubernetes.io/projected/2984c983-0968-4970-abbd-087609948c41-kube-api-access-lrv54\") pod \"2984c983-0968-4970-abbd-087609948c41\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.781033 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2984c983-0968-4970-abbd-087609948c41-operator-scripts\") pod \"2984c983-0968-4970-abbd-087609948c41\" (UID: \"2984c983-0968-4970-abbd-087609948c41\") " Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.781670 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/2984c983-0968-4970-abbd-087609948c41-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "2984c983-0968-4970-abbd-087609948c41" (UID: "2984c983-0968-4970-abbd-087609948c41"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.786423 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2984c983-0968-4970-abbd-087609948c41-kube-api-access-lrv54" (OuterVolumeSpecName: "kube-api-access-lrv54") pod "2984c983-0968-4970-abbd-087609948c41" (UID: "2984c983-0968-4970-abbd-087609948c41"). InnerVolumeSpecName "kube-api-access-lrv54". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.882517 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lrv54\" (UniqueName: \"kubernetes.io/projected/2984c983-0968-4970-abbd-087609948c41-kube-api-access-lrv54\") on node \"crc\" DevicePath \"\"" Feb 02 13:12:59 crc kubenswrapper[4703]: I0202 13:12:59.882558 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/2984c983-0968-4970-abbd-087609948c41-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:00 crc kubenswrapper[4703]: I0202 13:13:00.514875 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" event={"ID":"2984c983-0968-4970-abbd-087609948c41","Type":"ContainerDied","Data":"da25e04d7cc84fec521f469dfc9bdda1523fdd921ca401e933668f2a29e9a609"} Feb 02 13:13:00 crc kubenswrapper[4703]: I0202 13:13:00.514929 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="da25e04d7cc84fec521f469dfc9bdda1523fdd921ca401e933668f2a29e9a609" Feb 02 13:13:00 crc kubenswrapper[4703]: I0202 13:13:00.514968 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-1b16-account-create-update-hcbbx" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.060225 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-sync-9788l"] Feb 02 13:13:01 crc kubenswrapper[4703]: E0202 13:13:01.060510 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="83d8a461-7137-49da-ae6d-1a50ebc3d4f1" containerName="mariadb-database-create" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.060524 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="83d8a461-7137-49da-ae6d-1a50ebc3d4f1" containerName="mariadb-database-create" Feb 02 13:13:01 crc kubenswrapper[4703]: E0202 13:13:01.060546 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2984c983-0968-4970-abbd-087609948c41" containerName="mariadb-account-create-update" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.060552 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2984c983-0968-4970-abbd-087609948c41" containerName="mariadb-account-create-update" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.060655 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="83d8a461-7137-49da-ae6d-1a50ebc3d4f1" containerName="mariadb-database-create" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.060680 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2984c983-0968-4970-abbd-087609948c41" containerName="mariadb-account-create-update" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.061121 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.063359 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.063412 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-h95r2" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.063357 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"combined-ca-bundle" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.079387 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-9788l"] Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.098290 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-job-config-data\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.098349 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-config-data\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.098510 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-combined-ca-bundle\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " 
pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.098620 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-c6ptq\" (UniqueName: \"kubernetes.io/projected/2192eb12-795d-43a3-b56f-2efd9761d067-kube-api-access-c6ptq\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.199410 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-config-data\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.200132 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-combined-ca-bundle\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.200178 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-c6ptq\" (UniqueName: \"kubernetes.io/projected/2192eb12-795d-43a3-b56f-2efd9761d067-kube-api-access-c6ptq\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.200240 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-job-config-data\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.203877 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-config-data\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.204413 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-combined-ca-bundle\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.204582 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-job-config-data\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.216471 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-c6ptq\" (UniqueName: \"kubernetes.io/projected/2192eb12-795d-43a3-b56f-2efd9761d067-kube-api-access-c6ptq\") pod \"manila-db-sync-9788l\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") " pod="manila-kuttl-tests/manila-db-sync-9788l" Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 
Feb 02 13:13:01 crc kubenswrapper[4703]: I0202 13:13:01.609995 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-9788l"]
Feb 02 13:13:02 crc kubenswrapper[4703]: I0202 13:13:02.531980 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-9788l" event={"ID":"2192eb12-795d-43a3-b56f-2efd9761d067","Type":"ContainerStarted","Data":"449f7488b9d6b7c7abd9737282ba124b1c541003454f2e8499d9d271b9aca602"}
Feb 02 13:13:02 crc kubenswrapper[4703]: I0202 13:13:02.532348 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-9788l" event={"ID":"2192eb12-795d-43a3-b56f-2efd9761d067","Type":"ContainerStarted","Data":"8cbaf746c4bdbb6bb9eb4be691fb1260d11f5cf019137ad3ddbd01612f442951"}
Feb 02 13:13:02 crc kubenswrapper[4703]: I0202 13:13:02.561019 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-sync-9788l" podStartSLOduration=1.560993879 podStartE2EDuration="1.560993879s" podCreationTimestamp="2026-02-02 13:13:01 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:02.557726817 +0000 UTC m=+1309.572934351" watchObservedRunningTime="2026-02-02 13:13:02.560993879 +0000 UTC m=+1309.576201413"
Feb 02 13:13:03 crc kubenswrapper[4703]: I0202 13:13:03.541551 4703 generic.go:334] "Generic (PLEG): container finished" podID="2192eb12-795d-43a3-b56f-2efd9761d067" containerID="449f7488b9d6b7c7abd9737282ba124b1c541003454f2e8499d9d271b9aca602" exitCode=0
Feb 02 13:13:03 crc kubenswrapper[4703]: I0202 13:13:03.541605 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-9788l" event={"ID":"2192eb12-795d-43a3-b56f-2efd9761d067","Type":"ContainerDied","Data":"449f7488b9d6b7c7abd9737282ba124b1c541003454f2e8499d9d271b9aca602"}
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.817680 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-9788l"
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.974372 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-c6ptq\" (UniqueName: \"kubernetes.io/projected/2192eb12-795d-43a3-b56f-2efd9761d067-kube-api-access-c6ptq\") pod \"2192eb12-795d-43a3-b56f-2efd9761d067\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") "
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.974474 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-combined-ca-bundle\") pod \"2192eb12-795d-43a3-b56f-2efd9761d067\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") "
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.974507 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-config-data\") pod \"2192eb12-795d-43a3-b56f-2efd9761d067\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") "
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.974533 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-job-config-data\") pod \"2192eb12-795d-43a3-b56f-2efd9761d067\" (UID: \"2192eb12-795d-43a3-b56f-2efd9761d067\") "
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.979729 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2192eb12-795d-43a3-b56f-2efd9761d067-kube-api-access-c6ptq" (OuterVolumeSpecName: "kube-api-access-c6ptq") pod "2192eb12-795d-43a3-b56f-2efd9761d067" (UID: "2192eb12-795d-43a3-b56f-2efd9761d067"). InnerVolumeSpecName "kube-api-access-c6ptq". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.980339 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "2192eb12-795d-43a3-b56f-2efd9761d067" (UID: "2192eb12-795d-43a3-b56f-2efd9761d067"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.983833 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-config-data" (OuterVolumeSpecName: "config-data") pod "2192eb12-795d-43a3-b56f-2efd9761d067" (UID: "2192eb12-795d-43a3-b56f-2efd9761d067"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:13:04 crc kubenswrapper[4703]: I0202 13:13:04.994178 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2192eb12-795d-43a3-b56f-2efd9761d067" (UID: "2192eb12-795d-43a3-b56f-2efd9761d067"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.076254 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-c6ptq\" (UniqueName: \"kubernetes.io/projected/2192eb12-795d-43a3-b56f-2efd9761d067-kube-api-access-c6ptq\") on node \"crc\" DevicePath \"\""
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.076610 4703 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-combined-ca-bundle\") on node \"crc\" DevicePath \"\""
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.076623 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.076635 4703 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/2192eb12-795d-43a3-b56f-2efd9761d067-job-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.555670 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-9788l" event={"ID":"2192eb12-795d-43a3-b56f-2efd9761d067","Type":"ContainerDied","Data":"8cbaf746c4bdbb6bb9eb4be691fb1260d11f5cf019137ad3ddbd01612f442951"}
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.555721 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="8cbaf746c4bdbb6bb9eb4be691fb1260d11f5cf019137ad3ddbd01612f442951"
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.555733 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-9788l"
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.844140 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"]
Feb 02 13:13:05 crc kubenswrapper[4703]: E0202 13:13:05.844443 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2192eb12-795d-43a3-b56f-2efd9761d067" containerName="manila-db-sync"
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.844457 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2192eb12-795d-43a3-b56f-2efd9761d067" containerName="manila-db-sync"
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.844574 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2192eb12-795d-43a3-b56f-2efd9761d067" containerName="manila-db-sync"
Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.845211 4703 util.go:30] "No sandbox for pod can be found.
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.847829 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scripts" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.848044 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-h95r2" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.848174 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"combined-ca-bundle" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.848725 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scheduler-config-data" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.851823 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.856197 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.857495 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.861299 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share0-config-data" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.861523 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"ceph-conf-files" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.871903 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.887741 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888190 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-ceph\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888231 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-twbtj\" (UniqueName: \"kubernetes.io/projected/c690d78d-e839-457c-977b-1d4386653341-kube-api-access-twbtj\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888259 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888288 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-combined-ca-bundle\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " 
pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888303 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888351 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c690d78d-e839-457c-977b-1d4386653341-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888377 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888416 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-6fh2c\" (UniqueName: \"kubernetes.io/projected/2db4f393-e3be-438e-90aa-c7d861857d5d-kube-api-access-6fh2c\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888439 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888457 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-scripts\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888483 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888500 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888522 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-var-lib-manila\") pod \"manila-share-share0-0\" 
(UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.888536 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-scripts\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989671 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6fh2c\" (UniqueName: \"kubernetes.io/projected/2db4f393-e3be-438e-90aa-c7d861857d5d-kube-api-access-6fh2c\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989756 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989787 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-scripts\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989828 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989853 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989887 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989909 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-scripts\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989955 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-twbtj\" (UniqueName: \"kubernetes.io/projected/c690d78d-e839-457c-977b-1d4386653341-kube-api-access-twbtj\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 
13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.989976 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-ceph\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990003 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990026 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-combined-ca-bundle\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990049 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990090 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c690d78d-e839-457c-977b-1d4386653341-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990114 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990124 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.990154 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.994264 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-ceph\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.995238 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: 
\"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:05 crc kubenswrapper[4703]: I0202 13:13:05.995466 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c690d78d-e839-457c-977b-1d4386653341-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.002978 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-combined-ca-bundle\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.005809 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-scripts\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.012821 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-combined-ca-bundle\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.014693 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6fh2c\" (UniqueName: \"kubernetes.io/projected/2db4f393-e3be-438e-90aa-c7d861857d5d-kube-api-access-6fh2c\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.028373 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-twbtj\" (UniqueName: \"kubernetes.io/projected/c690d78d-e839-457c-977b-1d4386653341-kube-api-access-twbtj\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.038106 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-scripts\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.038409 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.039402 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data\") pod \"manila-scheduler-0\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 
13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.041252 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.153796 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.164311 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.167748 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"cert-manila-public-svc" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.167957 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"cert-manila-internal-svc" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.167762 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-api-config-data" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.176320 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.183547 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.192114 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.294910 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-logs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295368 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-public-tls-certs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295409 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-scripts\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295464 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-internal-tls-certs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295501 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-etc-machine-id\") pod 
\"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295549 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rstzd\" (UniqueName: \"kubernetes.io/projected/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-kube-api-access-rstzd\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295740 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data-custom\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295804 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.295859 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397211 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data-custom\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397253 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397296 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397325 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-logs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397342 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-public-tls-certs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397370 4703 
reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-scripts\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397421 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-internal-tls-certs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397441 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-etc-machine-id\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.397483 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rstzd\" (UniqueName: \"kubernetes.io/projected/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-kube-api-access-rstzd\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.398210 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-logs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.400336 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-etc-machine-id\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.402833 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-combined-ca-bundle\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.402867 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-public-tls-certs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.403552 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data-custom\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.404825 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc 
kubenswrapper[4703]: I0202 13:13:06.405715 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-scripts\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.406611 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-internal-tls-certs\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.419901 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rstzd\" (UniqueName: \"kubernetes.io/projected/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-kube-api-access-rstzd\") pod \"manila-api-0\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.483087 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.490544 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.556772 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:06 crc kubenswrapper[4703]: W0202 13:13:06.566858 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podc690d78d_e839_457c_977b_1d4386653341.slice/crio-9f8512fb70bb483dc7ce9c22f9db05a371ce8fe0b4d3cbc475cea403cd207ad6 WatchSource:0}: Error finding container 9f8512fb70bb483dc7ce9c22f9db05a371ce8fe0b4d3cbc475cea403cd207ad6: Status 404 returned error can't find the container with id 9f8512fb70bb483dc7ce9c22f9db05a371ce8fe0b4d3cbc475cea403cd207ad6 Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.573936 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"2db4f393-e3be-438e-90aa-c7d861857d5d","Type":"ContainerStarted","Data":"54a99a225484b62fb2856882af42124942b707c6cae85e46e1ee89061b1aeef2"} Feb 02 13:13:06 crc kubenswrapper[4703]: I0202 13:13:06.930469 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:06 crc kubenswrapper[4703]: W0202 13:13:06.934845 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod4eed06dc_6be2_44ed_8346_ff7bc2b829ff.slice/crio-86e77b8b70aa50a7edc9de0e412069b06f53d2cebcffa8e14ee5a1843b852416 WatchSource:0}: Error finding container 86e77b8b70aa50a7edc9de0e412069b06f53d2cebcffa8e14ee5a1843b852416: Status 404 returned error can't find the container with id 86e77b8b70aa50a7edc9de0e412069b06f53d2cebcffa8e14ee5a1843b852416 Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.587096 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"c690d78d-e839-457c-977b-1d4386653341","Type":"ContainerStarted","Data":"187e605b8b5ba9512924c8be5b00045e68ff6cf8047cc9aa462bd5e43ac07269"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.587685 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" 
pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"c690d78d-e839-457c-977b-1d4386653341","Type":"ContainerStarted","Data":"26ad83c2bfc195a4f39611759fa0d0fc5b926331f993c1f47862bf2ea99d61c6"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.587704 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"c690d78d-e839-457c-977b-1d4386653341","Type":"ContainerStarted","Data":"9f8512fb70bb483dc7ce9c22f9db05a371ce8fe0b4d3cbc475cea403cd207ad6"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.598478 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"2db4f393-e3be-438e-90aa-c7d861857d5d","Type":"ContainerStarted","Data":"d0cc208d199fb4ec1ce336f69119fa126d230d3c43f880c65686f70b4d97e8e4"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.598528 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"2db4f393-e3be-438e-90aa-c7d861857d5d","Type":"ContainerStarted","Data":"375af6d9eb40481cc844a4997795355582b423c00e1140e968a54f9264d1044e"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.600751 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4eed06dc-6be2-44ed-8346-ff7bc2b829ff","Type":"ContainerStarted","Data":"c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.600785 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4eed06dc-6be2-44ed-8346-ff7bc2b829ff","Type":"ContainerStarted","Data":"86e77b8b70aa50a7edc9de0e412069b06f53d2cebcffa8e14ee5a1843b852416"} Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.618287 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-0" podStartSLOduration=2.618242698 podStartE2EDuration="2.618242698s" podCreationTimestamp="2026-02-02 13:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:07.60724498 +0000 UTC m=+1314.622452514" watchObservedRunningTime="2026-02-02 13:13:07.618242698 +0000 UTC m=+1314.633450232" Feb 02 13:13:07 crc kubenswrapper[4703]: I0202 13:13:07.629704 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share0-0" podStartSLOduration=2.629684689 podStartE2EDuration="2.629684689s" podCreationTimestamp="2026-02-02 13:13:05 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:07.627267401 +0000 UTC m=+1314.642474935" watchObservedRunningTime="2026-02-02 13:13:07.629684689 +0000 UTC m=+1314.644892223" Feb 02 13:13:08 crc kubenswrapper[4703]: I0202 13:13:08.609587 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4eed06dc-6be2-44ed-8346-ff7bc2b829ff","Type":"ContainerStarted","Data":"0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751"} Feb 02 13:13:08 crc kubenswrapper[4703]: I0202 13:13:08.610148 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:08 crc kubenswrapper[4703]: I0202 13:13:08.637728 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-0" 
podStartSLOduration=2.637708876 podStartE2EDuration="2.637708876s" podCreationTimestamp="2026-02-02 13:13:06 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:08.633483498 +0000 UTC m=+1315.648691052" watchObservedRunningTime="2026-02-02 13:13:08.637708876 +0000 UTC m=+1315.652916410" Feb 02 13:13:16 crc kubenswrapper[4703]: I0202 13:13:16.178007 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:16 crc kubenswrapper[4703]: I0202 13:13:16.183951 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:17 crc kubenswrapper[4703]: I0202 13:13:17.725316 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:17 crc kubenswrapper[4703]: I0202 13:13:17.971997 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:27 crc kubenswrapper[4703]: I0202 13:13:27.809756 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:28 crc kubenswrapper[4703]: I0202 13:13:28.979064 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-sync-9788l"] Feb 02 13:13:28 crc kubenswrapper[4703]: I0202 13:13:28.988097 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-sync-9788l"] Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.018332 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.018716 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="manila-scheduler" containerID="cri-o://26ad83c2bfc195a4f39611759fa0d0fc5b926331f993c1f47862bf2ea99d61c6" gracePeriod=30 Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.018746 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="probe" containerID="cri-o://187e605b8b5ba9512924c8be5b00045e68ff6cf8047cc9aa462bd5e43ac07269" gracePeriod=30 Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.029554 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.038709 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="manila-share" containerID="cri-o://375af6d9eb40481cc844a4997795355582b423c00e1140e968a54f9264d1044e" gracePeriod=30 Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.038470 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="probe" containerID="cri-o://d0cc208d199fb4ec1ce336f69119fa126d230d3c43f880c65686f70b4d97e8e4" gracePeriod=30 Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.070033 4703 kubelet.go:2421] "SyncLoop ADD" source="api" 
pods=["manila-kuttl-tests/manila1b16-account-delete-2wj6b"] Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.076923 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.081383 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila1b16-account-delete-2wj6b"] Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.096402 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.096653 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api-log" containerID="cri-o://c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42" gracePeriod=30 Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.097013 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api" containerID="cri-o://0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751" gracePeriod=30 Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.246419 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-nnxtp\" (UniqueName: \"kubernetes.io/projected/88106aba-660a-43de-ba05-bdb77e13c0f7-kube-api-access-nnxtp\") pod \"manila1b16-account-delete-2wj6b\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.246803 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88106aba-660a-43de-ba05-bdb77e13c0f7-operator-scripts\") pod \"manila1b16-account-delete-2wj6b\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.348469 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-nnxtp\" (UniqueName: \"kubernetes.io/projected/88106aba-660a-43de-ba05-bdb77e13c0f7-kube-api-access-nnxtp\") pod \"manila1b16-account-delete-2wj6b\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.348583 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88106aba-660a-43de-ba05-bdb77e13c0f7-operator-scripts\") pod \"manila1b16-account-delete-2wj6b\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.349726 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88106aba-660a-43de-ba05-bdb77e13c0f7-operator-scripts\") pod \"manila1b16-account-delete-2wj6b\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.372094 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-nnxtp\" 
(UniqueName: \"kubernetes.io/projected/88106aba-660a-43de-ba05-bdb77e13c0f7-kube-api-access-nnxtp\") pod \"manila1b16-account-delete-2wj6b\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:29 crc kubenswrapper[4703]: I0202 13:13:29.403403 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.652513 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila1b16-account-delete-2wj6b"] Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.749252 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" event={"ID":"88106aba-660a-43de-ba05-bdb77e13c0f7","Type":"ContainerStarted","Data":"d63127584a7a7a746e17d4b125a30bb81143b40f0e5e37c37bb2028e7cccbef8"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.753044 4703 generic.go:334] "Generic (PLEG): container finished" podID="c690d78d-e839-457c-977b-1d4386653341" containerID="187e605b8b5ba9512924c8be5b00045e68ff6cf8047cc9aa462bd5e43ac07269" exitCode=0 Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.753132 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"c690d78d-e839-457c-977b-1d4386653341","Type":"ContainerDied","Data":"187e605b8b5ba9512924c8be5b00045e68ff6cf8047cc9aa462bd5e43ac07269"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.756817 4703 generic.go:334] "Generic (PLEG): container finished" podID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerID="d0cc208d199fb4ec1ce336f69119fa126d230d3c43f880c65686f70b4d97e8e4" exitCode=0 Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.756840 4703 generic.go:334] "Generic (PLEG): container finished" podID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerID="375af6d9eb40481cc844a4997795355582b423c00e1140e968a54f9264d1044e" exitCode=1 Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.756885 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"2db4f393-e3be-438e-90aa-c7d861857d5d","Type":"ContainerDied","Data":"d0cc208d199fb4ec1ce336f69119fa126d230d3c43f880c65686f70b4d97e8e4"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.756907 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"2db4f393-e3be-438e-90aa-c7d861857d5d","Type":"ContainerDied","Data":"375af6d9eb40481cc844a4997795355582b423c00e1140e968a54f9264d1044e"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.758555 4703 generic.go:334] "Generic (PLEG): container finished" podID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerID="c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42" exitCode=143 Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.758575 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4eed06dc-6be2-44ed-8346-ff7bc2b829ff","Type":"ContainerDied","Data":"c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:29.941321 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2192eb12-795d-43a3-b56f-2efd9761d067" path="/var/lib/kubelet/pods/2192eb12-795d-43a3-b56f-2efd9761d067/volumes" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.497570 4703 util.go:48] "No 
ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.666760 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-scripts\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.666842 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-etc-machine-id\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.666903 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-combined-ca-bundle\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.666925 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.666955 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data-custom\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.666984 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-ceph\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.667014 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-var-lib-manila\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.667051 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6fh2c\" (UniqueName: \"kubernetes.io/projected/2db4f393-e3be-438e-90aa-c7d861857d5d-kube-api-access-6fh2c\") pod \"2db4f393-e3be-438e-90aa-c7d861857d5d\" (UID: \"2db4f393-e3be-438e-90aa-c7d861857d5d\") " Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.668334 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.668391 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.673885 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/2db4f393-e3be-438e-90aa-c7d861857d5d-kube-api-access-6fh2c" (OuterVolumeSpecName: "kube-api-access-6fh2c") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "kube-api-access-6fh2c". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.690548 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-ceph" (OuterVolumeSpecName: "ceph") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.690634 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.690659 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-scripts" (OuterVolumeSpecName: "scripts") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.707452 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.754096 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data" (OuterVolumeSpecName: "config-data") pod "2db4f393-e3be-438e-90aa-c7d861857d5d" (UID: "2db4f393-e3be-438e-90aa-c7d861857d5d"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.766730 4703 generic.go:334] "Generic (PLEG): container finished" podID="88106aba-660a-43de-ba05-bdb77e13c0f7" containerID="2234618673f4a5d42c1f0957e677fba837ff4a29f53b64e0d0f28ef861c78dd7" exitCode=0 Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.766793 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" event={"ID":"88106aba-660a-43de-ba05-bdb77e13c0f7","Type":"ContainerDied","Data":"2234618673f4a5d42c1f0957e677fba837ff4a29f53b64e0d0f28ef861c78dd7"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768174 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768210 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768221 4703 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768230 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768239 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768248 4703 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/2db4f393-e3be-438e-90aa-c7d861857d5d-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768255 4703 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/2db4f393-e3be-438e-90aa-c7d861857d5d-var-lib-manila\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768263 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6fh2c\" (UniqueName: \"kubernetes.io/projected/2db4f393-e3be-438e-90aa-c7d861857d5d-kube-api-access-6fh2c\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768439 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"2db4f393-e3be-438e-90aa-c7d861857d5d","Type":"ContainerDied","Data":"54a99a225484b62fb2856882af42124942b707c6cae85e46e1ee89061b1aeef2"} Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768462 4703 scope.go:117] "RemoveContainer" containerID="d0cc208d199fb4ec1ce336f69119fa126d230d3c43f880c65686f70b4d97e8e4" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.768473 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.787561 4703 scope.go:117] "RemoveContainer" containerID="375af6d9eb40481cc844a4997795355582b423c00e1140e968a54f9264d1044e" Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.801066 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:30 crc kubenswrapper[4703]: I0202 13:13:30.808492 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:31 crc kubenswrapper[4703]: I0202 13:13:31.941766 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" path="/var/lib/kubelet/pods/2db4f393-e3be-438e-90aa-c7d861857d5d/volumes" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.026136 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.187655 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88106aba-660a-43de-ba05-bdb77e13c0f7-operator-scripts\") pod \"88106aba-660a-43de-ba05-bdb77e13c0f7\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.187974 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nnxtp\" (UniqueName: \"kubernetes.io/projected/88106aba-660a-43de-ba05-bdb77e13c0f7-kube-api-access-nnxtp\") pod \"88106aba-660a-43de-ba05-bdb77e13c0f7\" (UID: \"88106aba-660a-43de-ba05-bdb77e13c0f7\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.188645 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/88106aba-660a-43de-ba05-bdb77e13c0f7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "88106aba-660a-43de-ba05-bdb77e13c0f7" (UID: "88106aba-660a-43de-ba05-bdb77e13c0f7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.197343 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/88106aba-660a-43de-ba05-bdb77e13c0f7-kube-api-access-nnxtp" (OuterVolumeSpecName: "kube-api-access-nnxtp") pod "88106aba-660a-43de-ba05-bdb77e13c0f7" (UID: "88106aba-660a-43de-ba05-bdb77e13c0f7"). InnerVolumeSpecName "kube-api-access-nnxtp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.290351 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/88106aba-660a-43de-ba05-bdb77e13c0f7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.290729 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nnxtp\" (UniqueName: \"kubernetes.io/projected/88106aba-660a-43de-ba05-bdb77e13c0f7-kube-api-access-nnxtp\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.603993 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.695831 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.695914 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-logs\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.695952 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rstzd\" (UniqueName: \"kubernetes.io/projected/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-kube-api-access-rstzd\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696024 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-scripts\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696064 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-internal-tls-certs\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696082 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data-custom\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696121 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-etc-machine-id\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696136 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-combined-ca-bundle\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696158 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-public-tls-certs\") pod \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\" (UID: \"4eed06dc-6be2-44ed-8346-ff7bc2b829ff\") " Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696565 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-logs" (OuterVolumeSpecName: "logs") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: 
"4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.696615 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.700208 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-scripts" (OuterVolumeSpecName: "scripts") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.700604 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.711388 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-kube-api-access-rstzd" (OuterVolumeSpecName: "kube-api-access-rstzd") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "kube-api-access-rstzd". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.722322 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "combined-ca-bundle". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.730452 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-internal-tls-certs" (OuterVolumeSpecName: "internal-tls-certs") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "internal-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.731710 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data" (OuterVolumeSpecName: "config-data") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.734474 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-public-tls-certs" (OuterVolumeSpecName: "public-tls-certs") pod "4eed06dc-6be2-44ed-8346-ff7bc2b829ff" (UID: "4eed06dc-6be2-44ed-8346-ff7bc2b829ff"). InnerVolumeSpecName "public-tls-certs". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.784816 4703 generic.go:334] "Generic (PLEG): container finished" podID="c690d78d-e839-457c-977b-1d4386653341" containerID="26ad83c2bfc195a4f39611759fa0d0fc5b926331f993c1f47862bf2ea99d61c6" exitCode=0 Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.784911 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"c690d78d-e839-457c-977b-1d4386653341","Type":"ContainerDied","Data":"26ad83c2bfc195a4f39611759fa0d0fc5b926331f993c1f47862bf2ea99d61c6"} Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.788303 4703 generic.go:334] "Generic (PLEG): container finished" podID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerID="0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751" exitCode=0 Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.788366 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4eed06dc-6be2-44ed-8346-ff7bc2b829ff","Type":"ContainerDied","Data":"0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751"} Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.788400 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"4eed06dc-6be2-44ed-8346-ff7bc2b829ff","Type":"ContainerDied","Data":"86e77b8b70aa50a7edc9de0e412069b06f53d2cebcffa8e14ee5a1843b852416"} Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.788419 4703 scope.go:117] "RemoveContainer" containerID="0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.788557 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.796177 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" event={"ID":"88106aba-660a-43de-ba05-bdb77e13c0f7","Type":"ContainerDied","Data":"d63127584a7a7a746e17d4b125a30bb81143b40f0e5e37c37bb2028e7cccbef8"} Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.796216 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d63127584a7a7a746e17d4b125a30bb81143b40f0e5e37c37bb2028e7cccbef8" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.796231 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila1b16-account-delete-2wj6b" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797594 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797614 4703 reconciler_common.go:293] "Volume detached for volume \"internal-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-internal-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797626 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797636 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797644 4703 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797652 4703 reconciler_common.go:293] "Volume detached for volume \"public-tls-certs\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-public-tls-certs\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797660 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797669 4703 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-logs\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.797676 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rstzd\" (UniqueName: \"kubernetes.io/projected/4eed06dc-6be2-44ed-8346-ff7bc2b829ff-kube-api-access-rstzd\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.815762 4703 scope.go:117] "RemoveContainer" containerID="c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.823821 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.835030 4703 scope.go:117] "RemoveContainer" containerID="0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751" Feb 02 13:13:32 crc kubenswrapper[4703]: E0202 13:13:32.835529 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751\": container with ID starting with 0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751 not found: ID does not exist" containerID="0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.835588 4703 pod_container_deletor.go:53] 
"DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751"} err="failed to get container status \"0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751\": rpc error: code = NotFound desc = could not find container \"0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751\": container with ID starting with 0ef1c7ea3024ed03519e5b18fdc2f5bd460c9313886c7163b251ae35852a4751 not found: ID does not exist" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.835645 4703 scope.go:117] "RemoveContainer" containerID="c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42" Feb 02 13:13:32 crc kubenswrapper[4703]: E0202 13:13:32.835940 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42\": container with ID starting with c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42 not found: ID does not exist" containerID="c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.835979 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42"} err="failed to get container status \"c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42\": rpc error: code = NotFound desc = could not find container \"c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42\": container with ID starting with c8186152c2f01611f1df95f7fdcd37ef623ef1d6fc5c0e495ad79e6df13e6a42 not found: ID does not exist" Feb 02 13:13:32 crc kubenswrapper[4703]: I0202 13:13:32.836489 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:32.997786 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101509 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c690d78d-e839-457c-977b-1d4386653341-etc-machine-id\") pod \"c690d78d-e839-457c-977b-1d4386653341\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101592 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-twbtj\" (UniqueName: \"kubernetes.io/projected/c690d78d-e839-457c-977b-1d4386653341-kube-api-access-twbtj\") pod \"c690d78d-e839-457c-977b-1d4386653341\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101624 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/c690d78d-e839-457c-977b-1d4386653341-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "c690d78d-e839-457c-977b-1d4386653341" (UID: "c690d78d-e839-457c-977b-1d4386653341"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101660 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data-custom\") pod \"c690d78d-e839-457c-977b-1d4386653341\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101697 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-combined-ca-bundle\") pod \"c690d78d-e839-457c-977b-1d4386653341\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101717 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data\") pod \"c690d78d-e839-457c-977b-1d4386653341\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.101793 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-scripts\") pod \"c690d78d-e839-457c-977b-1d4386653341\" (UID: \"c690d78d-e839-457c-977b-1d4386653341\") " Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.102136 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/c690d78d-e839-457c-977b-1d4386653341-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.105030 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "c690d78d-e839-457c-977b-1d4386653341" (UID: "c690d78d-e839-457c-977b-1d4386653341"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.105487 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-scripts" (OuterVolumeSpecName: "scripts") pod "c690d78d-e839-457c-977b-1d4386653341" (UID: "c690d78d-e839-457c-977b-1d4386653341"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.109482 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c690d78d-e839-457c-977b-1d4386653341-kube-api-access-twbtj" (OuterVolumeSpecName: "kube-api-access-twbtj") pod "c690d78d-e839-457c-977b-1d4386653341" (UID: "c690d78d-e839-457c-977b-1d4386653341"). InnerVolumeSpecName "kube-api-access-twbtj". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.138367 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-combined-ca-bundle" (OuterVolumeSpecName: "combined-ca-bundle") pod "c690d78d-e839-457c-977b-1d4386653341" (UID: "c690d78d-e839-457c-977b-1d4386653341"). InnerVolumeSpecName "combined-ca-bundle". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.177723 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data" (OuterVolumeSpecName: "config-data") pod "c690d78d-e839-457c-977b-1d4386653341" (UID: "c690d78d-e839-457c-977b-1d4386653341"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.203008 4703 reconciler_common.go:293] "Volume detached for volume \"combined-ca-bundle\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-combined-ca-bundle\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.203051 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.203063 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.203076 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-twbtj\" (UniqueName: \"kubernetes.io/projected/c690d78d-e839-457c-977b-1d4386653341-kube-api-access-twbtj\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.203089 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/c690d78d-e839-457c-977b-1d4386653341-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.805478 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"c690d78d-e839-457c-977b-1d4386653341","Type":"ContainerDied","Data":"9f8512fb70bb483dc7ce9c22f9db05a371ce8fe0b4d3cbc475cea403cd207ad6"} Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.805556 4703 scope.go:117] "RemoveContainer" containerID="187e605b8b5ba9512924c8be5b00045e68ff6cf8047cc9aa462bd5e43ac07269" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.805560 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.825265 4703 scope.go:117] "RemoveContainer" containerID="26ad83c2bfc195a4f39611759fa0d0fc5b926331f993c1f47862bf2ea99d61c6" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.854901 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.859850 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.942810 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" path="/var/lib/kubelet/pods/4eed06dc-6be2-44ed-8346-ff7bc2b829ff/volumes" Feb 02 13:13:33 crc kubenswrapper[4703]: I0202 13:13:33.943356 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c690d78d-e839-457c-977b-1d4386653341" path="/var/lib/kubelet/pods/c690d78d-e839-457c-977b-1d4386653341/volumes" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.087778 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-create-jngfh"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.094566 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-create-jngfh"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.108202 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila1b16-account-delete-2wj6b"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.114322 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-1b16-account-create-update-hcbbx"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.119898 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila1b16-account-delete-2wj6b"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.124977 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-1b16-account-create-update-hcbbx"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189074 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-create-6f7lr"] Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189395 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="probe" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189418 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="probe" Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189435 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="probe" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189444 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="probe" Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189458 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api-log" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189467 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api-log" Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189480 4703 cpu_manager.go:410] "RemoveStaleState: 
removing container" podUID="88106aba-660a-43de-ba05-bdb77e13c0f7" containerName="mariadb-account-delete" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189488 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="88106aba-660a-43de-ba05-bdb77e13c0f7" containerName="mariadb-account-delete" Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189501 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="manila-share" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189509 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="manila-share" Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189523 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189534 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api" Feb 02 13:13:34 crc kubenswrapper[4703]: E0202 13:13:34.189559 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="manila-scheduler" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189567 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="manila-scheduler" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189713 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="manila-scheduler" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189726 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="probe" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189742 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="2db4f393-e3be-438e-90aa-c7d861857d5d" containerName="manila-share" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189753 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="c690d78d-e839-457c-977b-1d4386653341" containerName="probe" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189765 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189775 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="88106aba-660a-43de-ba05-bdb77e13c0f7" containerName="mariadb-account-delete" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.189796 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="4eed06dc-6be2-44ed-8346-ff7bc2b829ff" containerName="manila-api-log" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.190305 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.200737 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-6f7lr"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.283331 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-ef6c-account-create-update-sptnz"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.284108 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.285756 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-db-secret" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.293715 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-ef6c-account-create-update-sptnz"] Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.318385 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-vsw2g\" (UniqueName: \"kubernetes.io/projected/f02c2f52-b256-449b-9313-40f8ddfd8df7-kube-api-access-vsw2g\") pod \"manila-db-create-6f7lr\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.318459 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f02c2f52-b256-449b-9313-40f8ddfd8df7-operator-scripts\") pod \"manila-db-create-6f7lr\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.419785 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-vsw2g\" (UniqueName: \"kubernetes.io/projected/f02c2f52-b256-449b-9313-40f8ddfd8df7-kube-api-access-vsw2g\") pod \"manila-db-create-6f7lr\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.419862 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-lh9bh\" (UniqueName: \"kubernetes.io/projected/ab721f7b-8474-4f1d-ad73-98421e2d1215-kube-api-access-lh9bh\") pod \"manila-ef6c-account-create-update-sptnz\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.419928 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f02c2f52-b256-449b-9313-40f8ddfd8df7-operator-scripts\") pod \"manila-db-create-6f7lr\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.419990 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab721f7b-8474-4f1d-ad73-98421e2d1215-operator-scripts\") pod \"manila-ef6c-account-create-update-sptnz\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.420742 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f02c2f52-b256-449b-9313-40f8ddfd8df7-operator-scripts\") pod \"manila-db-create-6f7lr\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.438091 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-vsw2g\" (UniqueName: 
\"kubernetes.io/projected/f02c2f52-b256-449b-9313-40f8ddfd8df7-kube-api-access-vsw2g\") pod \"manila-db-create-6f7lr\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.506730 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.521446 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-lh9bh\" (UniqueName: \"kubernetes.io/projected/ab721f7b-8474-4f1d-ad73-98421e2d1215-kube-api-access-lh9bh\") pod \"manila-ef6c-account-create-update-sptnz\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.521550 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab721f7b-8474-4f1d-ad73-98421e2d1215-operator-scripts\") pod \"manila-ef6c-account-create-update-sptnz\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.522469 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab721f7b-8474-4f1d-ad73-98421e2d1215-operator-scripts\") pod \"manila-ef6c-account-create-update-sptnz\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.540425 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-lh9bh\" (UniqueName: \"kubernetes.io/projected/ab721f7b-8474-4f1d-ad73-98421e2d1215-kube-api-access-lh9bh\") pod \"manila-ef6c-account-create-update-sptnz\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.597065 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:34 crc kubenswrapper[4703]: I0202 13:13:34.980976 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-create-6f7lr"] Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.031109 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-ef6c-account-create-update-sptnz"] Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.826717 4703 generic.go:334] "Generic (PLEG): container finished" podID="f02c2f52-b256-449b-9313-40f8ddfd8df7" containerID="f2d2e17bf7340fe1607f4b2de00210124c8bba280bcfaa908b41230bc7ef167d" exitCode=0 Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.826773 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-6f7lr" event={"ID":"f02c2f52-b256-449b-9313-40f8ddfd8df7","Type":"ContainerDied","Data":"f2d2e17bf7340fe1607f4b2de00210124c8bba280bcfaa908b41230bc7ef167d"} Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.827432 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-6f7lr" event={"ID":"f02c2f52-b256-449b-9313-40f8ddfd8df7","Type":"ContainerStarted","Data":"d6ce99f91530e4e8abce2fd6cd50b30f7ac4e049e72072dd7e0ce51f66221a9f"} Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.828845 4703 generic.go:334] "Generic (PLEG): container finished" podID="ab721f7b-8474-4f1d-ad73-98421e2d1215" containerID="206a632afb2b7b2b6b746cef67a1e5d812e786097adf3ce7a1e755e50373396e" exitCode=0 Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.828875 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" event={"ID":"ab721f7b-8474-4f1d-ad73-98421e2d1215","Type":"ContainerDied","Data":"206a632afb2b7b2b6b746cef67a1e5d812e786097adf3ce7a1e755e50373396e"} Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.828892 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" event={"ID":"ab721f7b-8474-4f1d-ad73-98421e2d1215","Type":"ContainerStarted","Data":"dfed95e8962cde54f3e821fe24e17911c9364f7e0a58977a6714bda82f1ec302"} Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.945335 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2984c983-0968-4970-abbd-087609948c41" path="/var/lib/kubelet/pods/2984c983-0968-4970-abbd-087609948c41/volumes" Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.947694 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="83d8a461-7137-49da-ae6d-1a50ebc3d4f1" path="/var/lib/kubelet/pods/83d8a461-7137-49da-ae6d-1a50ebc3d4f1/volumes" Feb 02 13:13:35 crc kubenswrapper[4703]: I0202 13:13:35.948363 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="88106aba-660a-43de-ba05-bdb77e13c0f7" path="/var/lib/kubelet/pods/88106aba-660a-43de-ba05-bdb77e13c0f7/volumes" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.180776 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.189444 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.273487 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab721f7b-8474-4f1d-ad73-98421e2d1215-operator-scripts\") pod \"ab721f7b-8474-4f1d-ad73-98421e2d1215\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.273566 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-lh9bh\" (UniqueName: \"kubernetes.io/projected/ab721f7b-8474-4f1d-ad73-98421e2d1215-kube-api-access-lh9bh\") pod \"ab721f7b-8474-4f1d-ad73-98421e2d1215\" (UID: \"ab721f7b-8474-4f1d-ad73-98421e2d1215\") " Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.273603 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f02c2f52-b256-449b-9313-40f8ddfd8df7-operator-scripts\") pod \"f02c2f52-b256-449b-9313-40f8ddfd8df7\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.273692 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vsw2g\" (UniqueName: \"kubernetes.io/projected/f02c2f52-b256-449b-9313-40f8ddfd8df7-kube-api-access-vsw2g\") pod \"f02c2f52-b256-449b-9313-40f8ddfd8df7\" (UID: \"f02c2f52-b256-449b-9313-40f8ddfd8df7\") " Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.274157 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/ab721f7b-8474-4f1d-ad73-98421e2d1215-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "ab721f7b-8474-4f1d-ad73-98421e2d1215" (UID: "ab721f7b-8474-4f1d-ad73-98421e2d1215"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.274195 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/f02c2f52-b256-449b-9313-40f8ddfd8df7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "f02c2f52-b256-449b-9313-40f8ddfd8df7" (UID: "f02c2f52-b256-449b-9313-40f8ddfd8df7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.278961 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ab721f7b-8474-4f1d-ad73-98421e2d1215-kube-api-access-lh9bh" (OuterVolumeSpecName: "kube-api-access-lh9bh") pod "ab721f7b-8474-4f1d-ad73-98421e2d1215" (UID: "ab721f7b-8474-4f1d-ad73-98421e2d1215"). InnerVolumeSpecName "kube-api-access-lh9bh". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.280342 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f02c2f52-b256-449b-9313-40f8ddfd8df7-kube-api-access-vsw2g" (OuterVolumeSpecName: "kube-api-access-vsw2g") pod "f02c2f52-b256-449b-9313-40f8ddfd8df7" (UID: "f02c2f52-b256-449b-9313-40f8ddfd8df7"). InnerVolumeSpecName "kube-api-access-vsw2g". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.375494 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/f02c2f52-b256-449b-9313-40f8ddfd8df7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.375538 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vsw2g\" (UniqueName: \"kubernetes.io/projected/f02c2f52-b256-449b-9313-40f8ddfd8df7-kube-api-access-vsw2g\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.375559 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/ab721f7b-8474-4f1d-ad73-98421e2d1215-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.375573 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-lh9bh\" (UniqueName: \"kubernetes.io/projected/ab721f7b-8474-4f1d-ad73-98421e2d1215-kube-api-access-lh9bh\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.844894 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-create-6f7lr" event={"ID":"f02c2f52-b256-449b-9313-40f8ddfd8df7","Type":"ContainerDied","Data":"d6ce99f91530e4e8abce2fd6cd50b30f7ac4e049e72072dd7e0ce51f66221a9f"} Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.844927 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-create-6f7lr" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.844963 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="d6ce99f91530e4e8abce2fd6cd50b30f7ac4e049e72072dd7e0ce51f66221a9f" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.846882 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" event={"ID":"ab721f7b-8474-4f1d-ad73-98421e2d1215","Type":"ContainerDied","Data":"dfed95e8962cde54f3e821fe24e17911c9364f7e0a58977a6714bda82f1ec302"} Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.846932 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="dfed95e8962cde54f3e821fe24e17911c9364f7e0a58977a6714bda82f1ec302" Feb 02 13:13:37 crc kubenswrapper[4703]: I0202 13:13:37.846992 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-ef6c-account-create-update-sptnz" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.504919 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-db-sync-5rpp4"] Feb 02 13:13:39 crc kubenswrapper[4703]: E0202 13:13:39.505174 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ab721f7b-8474-4f1d-ad73-98421e2d1215" containerName="mariadb-account-create-update" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.505187 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="ab721f7b-8474-4f1d-ad73-98421e2d1215" containerName="mariadb-account-create-update" Feb 02 13:13:39 crc kubenswrapper[4703]: E0202 13:13:39.505201 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f02c2f52-b256-449b-9313-40f8ddfd8df7" containerName="mariadb-database-create" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.505208 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f02c2f52-b256-449b-9313-40f8ddfd8df7" containerName="mariadb-database-create" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.505348 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="ab721f7b-8474-4f1d-ad73-98421e2d1215" containerName="mariadb-account-create-update" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.505358 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f02c2f52-b256-449b-9313-40f8ddfd8df7" containerName="mariadb-database-create" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.505806 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.507495 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-v9qh5" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.507687 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.518961 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-5rpp4"] Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.604047 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-job-config-data\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.604132 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-config-data\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.604198 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-9k4q2\" (UniqueName: \"kubernetes.io/projected/775a9762-1bba-4877-a238-774b73d8b360-kube-api-access-9k4q2\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.705512 4703 reconciler_common.go:218] "operationExecutor.MountVolume 
started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-job-config-data\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.705623 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-config-data\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.705668 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-9k4q2\" (UniqueName: \"kubernetes.io/projected/775a9762-1bba-4877-a238-774b73d8b360-kube-api-access-9k4q2\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.710253 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-job-config-data\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.710569 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-config-data\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.723710 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-9k4q2\" (UniqueName: \"kubernetes.io/projected/775a9762-1bba-4877-a238-774b73d8b360-kube-api-access-9k4q2\") pod \"manila-db-sync-5rpp4\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:39 crc kubenswrapper[4703]: I0202 13:13:39.823055 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:40 crc kubenswrapper[4703]: I0202 13:13:40.287011 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-db-sync-5rpp4"] Feb 02 13:13:40 crc kubenswrapper[4703]: I0202 13:13:40.869625 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-5rpp4" event={"ID":"775a9762-1bba-4877-a238-774b73d8b360","Type":"ContainerStarted","Data":"fd3a33e2fb2a5300e2e30b1fc9393d041b9e1e4ab7b75f62b9abb1b5b8c0553c"} Feb 02 13:13:40 crc kubenswrapper[4703]: I0202 13:13:40.870087 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-5rpp4" event={"ID":"775a9762-1bba-4877-a238-774b73d8b360","Type":"ContainerStarted","Data":"f8cca25cb0f451bc675ec64091f086381aee47b9d74598061c9ba363424ba796"} Feb 02 13:13:40 crc kubenswrapper[4703]: I0202 13:13:40.890078 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-db-sync-5rpp4" podStartSLOduration=1.890054882 podStartE2EDuration="1.890054882s" podCreationTimestamp="2026-02-02 13:13:39 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:40.887791388 +0000 UTC m=+1347.902998992" watchObservedRunningTime="2026-02-02 13:13:40.890054882 +0000 UTC m=+1347.905262416" Feb 02 13:13:42 crc kubenswrapper[4703]: I0202 13:13:42.885507 4703 generic.go:334] "Generic (PLEG): container finished" podID="775a9762-1bba-4877-a238-774b73d8b360" containerID="fd3a33e2fb2a5300e2e30b1fc9393d041b9e1e4ab7b75f62b9abb1b5b8c0553c" exitCode=0 Feb 02 13:13:42 crc kubenswrapper[4703]: I0202 13:13:42.885565 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-5rpp4" event={"ID":"775a9762-1bba-4877-a238-774b73d8b360","Type":"ContainerDied","Data":"fd3a33e2fb2a5300e2e30b1fc9393d041b9e1e4ab7b75f62b9abb1b5b8c0553c"} Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.179914 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.277144 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-config-data\") pod \"775a9762-1bba-4877-a238-774b73d8b360\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.277184 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9k4q2\" (UniqueName: \"kubernetes.io/projected/775a9762-1bba-4877-a238-774b73d8b360-kube-api-access-9k4q2\") pod \"775a9762-1bba-4877-a238-774b73d8b360\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.277302 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-job-config-data\") pod \"775a9762-1bba-4877-a238-774b73d8b360\" (UID: \"775a9762-1bba-4877-a238-774b73d8b360\") " Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.282800 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "775a9762-1bba-4877-a238-774b73d8b360" (UID: "775a9762-1bba-4877-a238-774b73d8b360"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.282966 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/775a9762-1bba-4877-a238-774b73d8b360-kube-api-access-9k4q2" (OuterVolumeSpecName: "kube-api-access-9k4q2") pod "775a9762-1bba-4877-a238-774b73d8b360" (UID: "775a9762-1bba-4877-a238-774b73d8b360"). InnerVolumeSpecName "kube-api-access-9k4q2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.285452 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-config-data" (OuterVolumeSpecName: "config-data") pod "775a9762-1bba-4877-a238-774b73d8b360" (UID: "775a9762-1bba-4877-a238-774b73d8b360"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.378766 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.379004 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9k4q2\" (UniqueName: \"kubernetes.io/projected/775a9762-1bba-4877-a238-774b73d8b360-kube-api-access-9k4q2\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.379018 4703 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/775a9762-1bba-4877-a238-774b73d8b360-job-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.901331 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-db-sync-5rpp4" event={"ID":"775a9762-1bba-4877-a238-774b73d8b360","Type":"ContainerDied","Data":"f8cca25cb0f451bc675ec64091f086381aee47b9d74598061c9ba363424ba796"} Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.901373 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="f8cca25cb0f451bc675ec64091f086381aee47b9d74598061c9ba363424ba796" Feb 02 13:13:44 crc kubenswrapper[4703]: I0202 13:13:44.901394 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-db-sync-5rpp4" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.159763 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: E0202 13:13:45.160120 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="775a9762-1bba-4877-a238-774b73d8b360" containerName="manila-db-sync" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.160135 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="775a9762-1bba-4877-a238-774b73d8b360" containerName="manila-db-sync" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.160320 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="775a9762-1bba-4877-a238-774b73d8b360" containerName="manila-db-sync" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.161255 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.164581 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-config-data" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.164825 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scripts" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.165000 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-scheduler-config-data" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.165389 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-manila-dockercfg-v9qh5" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.170778 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.261191 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.262213 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.264348 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share0-config-data" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.264572 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"ceph-conf-files" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.274318 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.298009 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/98006c61-e202-4f08-92f5-8805fb51d793-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.298050 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.298075 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.298327 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-scripts\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.298399 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume 
\"kube-api-access-6skgg\" (UniqueName: \"kubernetes.io/projected/98006c61-e202-4f08-92f5-8805fb51d793-kube-api-access-6skgg\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.341036 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.342239 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.347035 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-api-config-data" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.360477 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399206 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-scripts\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399263 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-ceph\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399302 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399325 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399400 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-scripts\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399427 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-6skgg\" (UniqueName: \"kubernetes.io/projected/98006c61-e202-4f08-92f5-8805fb51d793-kube-api-access-6skgg\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399456 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/98006c61-e202-4f08-92f5-8805fb51d793-etc-machine-id\") pod \"manila-scheduler-0\" (UID: 
\"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399471 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399488 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399503 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399520 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399534 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4nmff\" (UniqueName: \"kubernetes.io/projected/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-kube-api-access-4nmff\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.399665 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/98006c61-e202-4f08-92f5-8805fb51d793-etc-machine-id\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.403103 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-scripts\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.403684 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.407141 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data-custom\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:45 crc kubenswrapper[4703]: 
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.417044 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-6skgg\" (UniqueName: \"kubernetes.io/projected/98006c61-e202-4f08-92f5-8805fb51d793-kube-api-access-6skgg\") pod \"manila-scheduler-0\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") " pod="manila-kuttl-tests/manila-scheduler-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.483291 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.502773 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.502835 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-52sh7\" (UniqueName: \"kubernetes.io/projected/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-kube-api-access-52sh7\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.502869 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.502923 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4nmff\" (UniqueName: \"kubernetes.io/projected/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-kube-api-access-4nmff\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.502953 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-scripts\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.502979 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-scripts\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503016 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-etc-machine-id\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503042 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-logs\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503063 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-ceph\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503088 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data-custom\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503118 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503147 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503173 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.503413 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-var-lib-manila\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.507715 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.510575 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-etc-machine-id\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.511772 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-scripts\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.514379 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-ceph\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.515019 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data-custom\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.524950 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4nmff\" (UniqueName: \"kubernetes.io/projected/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-kube-api-access-4nmff\") pod \"manila-share-share0-0\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.576542 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604429 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-52sh7\" (UniqueName: \"kubernetes.io/projected/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-kube-api-access-52sh7\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604514 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-scripts\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604579 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-etc-machine-id\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604615 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-logs\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604655 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data-custom\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604712 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.604803 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-etc-machine-id\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.606083 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-logs\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.609921 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-scripts\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.610522 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data-custom\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.615841 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.630761 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-52sh7\" (UniqueName: \"kubernetes.io/projected/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-kube-api-access-52sh7\") pod \"manila-api-0\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") " pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.656769 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0"
Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.733069 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.902002 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:13:45 crc kubenswrapper[4703]: W0202 13:13:45.909254 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-podfbf2e605_6ef5_4f1d_bea6_22cf05aa3308.slice/crio-3472430197aa240ddb4c648c8e6172fc5e01ebb921a592ab51f5b9f76ec7f85b WatchSource:0}: Error finding container 3472430197aa240ddb4c648c8e6172fc5e01ebb921a592ab51f5b9f76ec7f85b: Status 404 returned error can't find the container with id 3472430197aa240ddb4c648c8e6172fc5e01ebb921a592ab51f5b9f76ec7f85b Feb 02 13:13:45 crc kubenswrapper[4703]: I0202 13:13:45.910762 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"98006c61-e202-4f08-92f5-8805fb51d793","Type":"ContainerStarted","Data":"d0293ab63c4cc6e133439ce5871ec54f9fbfdeb211c58ffe4c916328c22f4ee4"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.047586 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:13:46 crc kubenswrapper[4703]: W0202 13:13:46.076816 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod8ad4a92e_fdcb_4f08_8874_e4e27e9ae96b.slice/crio-c64789183d12814bfeefdb26420f6c72cdbdf4816e0ac7055a04fe08502d78ed WatchSource:0}: Error finding container c64789183d12814bfeefdb26420f6c72cdbdf4816e0ac7055a04fe08502d78ed: Status 404 returned error can't find the container with id c64789183d12814bfeefdb26420f6c72cdbdf4816e0ac7055a04fe08502d78ed Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.921669 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"98006c61-e202-4f08-92f5-8805fb51d793","Type":"ContainerStarted","Data":"0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.922204 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"98006c61-e202-4f08-92f5-8805fb51d793","Type":"ContainerStarted","Data":"1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.927330 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308","Type":"ContainerStarted","Data":"c58fed4bfa018c1c1011ece934454c7cb5c805e238808ad539766d6cfd7fc301"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.927406 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308","Type":"ContainerStarted","Data":"6cdd834a2f2c5cbd64c9a55a08c30fd187e352c8b7c4740f0e94f7c1fbcc0528"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.927423 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308","Type":"ContainerStarted","Data":"3472430197aa240ddb4c648c8e6172fc5e01ebb921a592ab51f5b9f76ec7f85b"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 
13:13:46.928505 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.930862 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b","Type":"ContainerStarted","Data":"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.930899 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b","Type":"ContainerStarted","Data":"c64789183d12814bfeefdb26420f6c72cdbdf4816e0ac7055a04fe08502d78ed"} Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.972359 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-scheduler-0" podStartSLOduration=1.9723381070000001 podStartE2EDuration="1.972338107s" podCreationTimestamp="2026-02-02 13:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:46.942356366 +0000 UTC m=+1353.957563900" watchObservedRunningTime="2026-02-02 13:13:46.972338107 +0000 UTC m=+1353.987545641" Feb 02 13:13:46 crc kubenswrapper[4703]: I0202 13:13:46.973222 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-api-0" podStartSLOduration=1.973215831 podStartE2EDuration="1.973215831s" podCreationTimestamp="2026-02-02 13:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:46.962992235 +0000 UTC m=+1353.978199799" watchObservedRunningTime="2026-02-02 13:13:46.973215831 +0000 UTC m=+1353.988423365" Feb 02 13:13:47 crc kubenswrapper[4703]: I0202 13:13:47.960823 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b","Type":"ContainerStarted","Data":"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c"} Feb 02 13:13:47 crc kubenswrapper[4703]: I0202 13:13:47.976685 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share0-0" podStartSLOduration=2.976520726 podStartE2EDuration="2.976520726s" podCreationTimestamp="2026-02-02 13:13:45 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:13:47.97309906 +0000 UTC m=+1354.988306634" watchObservedRunningTime="2026-02-02 13:13:47.976520726 +0000 UTC m=+1354.991728260" Feb 02 13:13:55 crc kubenswrapper[4703]: I0202 13:13:55.484467 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:13:55 crc kubenswrapper[4703]: I0202 13:13:55.576807 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:14:07 crc kubenswrapper[4703]: I0202 13:14:07.038155 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:14:07 crc kubenswrapper[4703]: I0202 13:14:07.135686 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:14:07 crc kubenswrapper[4703]: 
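
The probe records above bracket how long each service took to pass its startup probe: scheduler and share0 first report "unhealthy" at 13:13:55 and flip to "started" at 13:14:07, roughly 12 s. A sketch extracting that interval per pod from the "SyncLoop (probe)" records (same parsing assumptions as earlier; the year default is taken from the timestamps in this log):

import re
from datetime import datetime

PROBE = re.compile(r'I(\d{4}) (\d{2}:\d{2}:\d{2}\.\d{6}).*"SyncLoop \(probe\)" '
                   r'probe="startup" status="(\w*)" pod="([^"]+)"')

def startup_probe_latency(lines, year=2026):
    # Seconds from the first "unhealthy" report to "started", per pod.
    first_unhealthy, started = {}, {}
    for line in lines:
        m = PROBE.search(line)
        if not m:
            continue
        mmdd, t, status, pod = m.groups()
        ts = datetime.strptime(f'{year}{mmdd} {t}', '%Y%m%d %H:%M:%S.%f')
        if status == 'unhealthy':
            first_unhealthy.setdefault(pod, ts)
        elif status == 'started':
            started[pod] = ts
    return {p: (started[p] - first_unhealthy[p]).total_seconds()
            for p in started if p in first_unhealthy}
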
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.257489 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"]
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.258479 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.260674 4703 reflector.go:368] Caches populated for *v1.Secret from object-"manila-kuttl-tests"/"manila-share-share1-config-data"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.272886 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"]
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.439868 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-scripts\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.439933 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.440023 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-ceph\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.440048 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.440240 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.440401 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cnhwf\" (UniqueName: \"kubernetes.io/projected/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-kube-api-access-cnhwf\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.440450 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542121 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cnhwf\" (UniqueName: \"kubernetes.io/projected/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-kube-api-access-cnhwf\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542195 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542262 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-scripts\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542358 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542481 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-ceph\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542524 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542562 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542651 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-etc-machine-id\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.542754 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-var-lib-manila\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.549154 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data-custom\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.549230 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.549596 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-scripts\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.559987 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-ceph\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.561014 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cnhwf\" (UniqueName: \"kubernetes.io/projected/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-kube-api-access-cnhwf\") pod \"manila-share-share1-0\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") " pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:08 crc kubenswrapper[4703]: I0202 13:14:08.584308 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0"
Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0" Feb 02 13:14:09 crc kubenswrapper[4703]: I0202 13:14:09.006772 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Feb 02 13:14:09 crc kubenswrapper[4703]: W0202 13:14:09.017629 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17628b62_3c9d_4d0e_9c33_92dfdf518d4c.slice/crio-34be6171ba062a133e11de5acb1f7fb8887a4909f091531b277b76ca7aac310d WatchSource:0}: Error finding container 34be6171ba062a133e11de5acb1f7fb8887a4909f091531b277b76ca7aac310d: Status 404 returned error can't find the container with id 34be6171ba062a133e11de5acb1f7fb8887a4909f091531b277b76ca7aac310d Feb 02 13:14:09 crc kubenswrapper[4703]: I0202 13:14:09.118162 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"17628b62-3c9d-4d0e-9c33-92dfdf518d4c","Type":"ContainerStarted","Data":"34be6171ba062a133e11de5acb1f7fb8887a4909f091531b277b76ca7aac310d"} Feb 02 13:14:10 crc kubenswrapper[4703]: I0202 13:14:10.128048 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"17628b62-3c9d-4d0e-9c33-92dfdf518d4c","Type":"ContainerStarted","Data":"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b"} Feb 02 13:14:10 crc kubenswrapper[4703]: I0202 13:14:10.128091 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"17628b62-3c9d-4d0e-9c33-92dfdf518d4c","Type":"ContainerStarted","Data":"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d"} Feb 02 13:14:18 crc kubenswrapper[4703]: I0202 13:14:18.585205 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="manila-kuttl-tests/manila-share-share1-0" Feb 02 13:14:30 crc kubenswrapper[4703]: I0202 13:14:30.226709 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="manila-kuttl-tests/manila-share-share1-0" Feb 02 13:14:30 crc kubenswrapper[4703]: I0202 13:14:30.249292 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-share-share1-0" podStartSLOduration=22.249251563 podStartE2EDuration="22.249251563s" podCreationTimestamp="2026-02-02 13:14:08 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:14:10.148608179 +0000 UTC m=+1377.163815743" watchObservedRunningTime="2026-02-02 13:14:30.249251563 +0000 UTC m=+1397.264459097" Feb 02 13:14:30 crc kubenswrapper[4703]: I0202 13:14:30.998867 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:14:30 crc kubenswrapper[4703]: I0202 13:14:30.999102 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="manila-share" containerID="cri-o://53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8" gracePeriod=30 Feb 02 13:14:30 crc kubenswrapper[4703]: I0202 13:14:30.999222 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share0-0" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="probe" containerID="cri-o://52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c" 
gracePeriod=30 Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.031058 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.132753 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-scripts\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.132863 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-ceph\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.132914 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.132941 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4nmff\" (UniqueName: \"kubernetes.io/projected/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-kube-api-access-4nmff\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.132962 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data-custom\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.133007 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-etc-machine-id\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.133020 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-var-lib-manila\") pod \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\" (UID: \"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b\") " Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.133199 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.133772 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "etc-machine-id". 
PluginName "kubernetes.io/host-path", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.139246 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-ceph" (OuterVolumeSpecName: "ceph") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.139588 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-scripts" (OuterVolumeSpecName: "scripts") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.139818 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-kube-api-access-4nmff" (OuterVolumeSpecName: "kube-api-access-4nmff") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "kube-api-access-4nmff". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.140097 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.196173 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data" (OuterVolumeSpecName: "config-data") pod "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" (UID: "8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234653 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234718 4703 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-var-lib-manila\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234741 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234810 4703 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234832 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234849 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4nmff\" (UniqueName: \"kubernetes.io/projected/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-kube-api-access-4nmff\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.234903 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301611 4703 generic.go:334] "Generic (PLEG): container finished" podID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerID="52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c" exitCode=0 Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301659 4703 generic.go:334] "Generic (PLEG): container finished" podID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerID="53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8" exitCode=1 Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301679 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b","Type":"ContainerDied","Data":"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c"} Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301708 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share0-0" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301721 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b","Type":"ContainerDied","Data":"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8"} Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301732 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share0-0" event={"ID":"8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b","Type":"ContainerDied","Data":"c64789183d12814bfeefdb26420f6c72cdbdf4816e0ac7055a04fe08502d78ed"} Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.301750 4703 scope.go:117] "RemoveContainer" containerID="52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.321127 4703 scope.go:117] "RemoveContainer" containerID="53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.343825 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.348827 4703 scope.go:117] "RemoveContainer" containerID="52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c" Feb 02 13:14:32 crc kubenswrapper[4703]: E0202 13:14:32.349813 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c\": container with ID starting with 52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c not found: ID does not exist" containerID="52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.349866 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c"} err="failed to get container status \"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c\": rpc error: code = NotFound desc = could not find container \"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c\": container with ID starting with 52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c not found: ID does not exist" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.349889 4703 scope.go:117] "RemoveContainer" containerID="53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8" Feb 02 13:14:32 crc kubenswrapper[4703]: E0202 13:14:32.350191 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8\": container with ID starting with 53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8 not found: ID does not exist" containerID="53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.350217 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8"} err="failed to get container status \"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8\": rpc error: code = NotFound desc = could not find container 
\"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8\": container with ID starting with 53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8 not found: ID does not exist" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.350234 4703 scope.go:117] "RemoveContainer" containerID="52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.350577 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c"} err="failed to get container status \"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c\": rpc error: code = NotFound desc = could not find container \"52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c\": container with ID starting with 52ce44a6987d2c707c11e19d19d25afd83693940b741daec53aceb2fcccc7b2c not found: ID does not exist" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.350630 4703 scope.go:117] "RemoveContainer" containerID="53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.350973 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8"} err="failed to get container status \"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8\": rpc error: code = NotFound desc = could not find container \"53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8\": container with ID starting with 53ece8fceba2663cf940e7f42509ae5b8f391604e0e17a445503890d0b4b27f8 not found: ID does not exist" Feb 02 13:14:32 crc kubenswrapper[4703]: I0202 13:14:32.353761 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share0-0"] Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.311011 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"] Feb 02 13:14:33 crc kubenswrapper[4703]: E0202 13:14:33.311672 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="manila-share" Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.311688 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="manila-share" Feb 02 13:14:33 crc kubenswrapper[4703]: E0202 13:14:33.311705 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="probe" Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.311714 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="probe" Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.311878 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="manila-share" Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.311898 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" containerName="probe" Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.312459 4703 util.go:30] "No sandbox for pod can be found. 
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.328028 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"]
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.357950 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-config-data\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.358039 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-job-config-data\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.358074 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-rd96m\" (UniqueName: \"kubernetes.io/projected/6a995ed1-b4b5-43d3-91cf-2f461916e342-kube-api-access-rd96m\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.458770 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-rd96m\" (UniqueName: \"kubernetes.io/projected/6a995ed1-b4b5-43d3-91cf-2f461916e342-kube-api-access-rd96m\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.458879 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-config-data\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.458923 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-job-config-data\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.463105 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-job-config-data\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.463401 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-config-data\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.475054 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-rd96m\" (UniqueName: \"kubernetes.io/projected/6a995ed1-b4b5-43d3-91cf-2f461916e342-kube-api-access-rd96m\") pod \"manila-service-cleanup-n5b5h655-4tsfr\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.632150 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.859934 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"]
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.869388 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-sync-5rpp4"]
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.884556 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-sync-5rpp4"]
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.921987 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"]
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.922213 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share1-0" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="manila-share" containerID="cri-o://96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d" gracePeriod=30
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.922332 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-share-share1-0" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="probe" containerID="cri-o://c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b" gracePeriod=30
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.930792 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"]
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.931065 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="manila-scheduler" containerID="cri-o://1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa" gracePeriod=30
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.931404 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-scheduler-0" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="probe" containerID="cri-o://0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2" gracePeriod=30
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.998499 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="775a9762-1bba-4877-a238-774b73d8b360" path="/var/lib/kubelet/pods/775a9762-1bba-4877-a238-774b73d8b360/volumes"
Feb 02 13:14:33 crc kubenswrapper[4703]: I0202 13:14:33.999197 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b" path="/var/lib/kubelet/pods/8ad4a92e-fdcb-4f08-8874-e4e27e9ae96b/volumes"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.014387 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/manilaef6c-account-delete-7mmwz"]
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.015233 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.029388 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"]
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.029671 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api-log" containerID="cri-o://6cdd834a2f2c5cbd64c9a55a08c30fd187e352c8b7c4740f0e94f7c1fbcc0528" gracePeriod=30
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.029810 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-api-0" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api" containerID="cri-o://c58fed4bfa018c1c1011ece934454c7cb5c805e238808ad539766d6cfd7fc301" gracePeriod=30
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.070338 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manilaef6c-account-delete-7mmwz"]
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.090647 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27ae1308-7fc6-4a91-92d7-d52fefac9b17-operator-scripts\") pod \"manilaef6c-account-delete-7mmwz\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") " pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.090721 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cspj4\" (UniqueName: \"kubernetes.io/projected/27ae1308-7fc6-4a91-92d7-d52fefac9b17-kube-api-access-cspj4\") pod \"manilaef6c-account-delete-7mmwz\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") " pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.157322 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"]
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.191777 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27ae1308-7fc6-4a91-92d7-d52fefac9b17-operator-scripts\") pod \"manilaef6c-account-delete-7mmwz\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") " pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.191839 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cspj4\" (UniqueName: \"kubernetes.io/projected/27ae1308-7fc6-4a91-92d7-d52fefac9b17-kube-api-access-cspj4\") pod \"manilaef6c-account-delete-7mmwz\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") " pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.193120 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27ae1308-7fc6-4a91-92d7-d52fefac9b17-operator-scripts\") pod \"manilaef6c-account-delete-7mmwz\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") " pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.212436 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cspj4\" (UniqueName: \"kubernetes.io/projected/27ae1308-7fc6-4a91-92d7-d52fefac9b17-kube-api-access-cspj4\") pod \"manilaef6c-account-delete-7mmwz\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") " pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.337853 4703 generic.go:334] "Generic (PLEG): container finished" podID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerID="6cdd834a2f2c5cbd64c9a55a08c30fd187e352c8b7c4740f0e94f7c1fbcc0528" exitCode=143
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.338147 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308","Type":"ContainerDied","Data":"6cdd834a2f2c5cbd64c9a55a08c30fd187e352c8b7c4740f0e94f7c1fbcc0528"}
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.341564 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" event={"ID":"6a995ed1-b4b5-43d3-91cf-2f461916e342","Type":"ContainerStarted","Data":"582f16779ad184d11afece6c8063b66fe75957aa9f369b5e6f641c3f87009c89"}
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.348461 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:34 crc kubenswrapper[4703]: E0202 13:14:34.384309 4703 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17628b62_3c9d_4d0e_9c33_92dfdf518d4c.slice/crio-c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98006c61_e202_4f08_92f5_8805fb51d793.slice/crio-conmon-0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod98006c61_e202_4f08_92f5_8805fb51d793.slice/crio-0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2.scope\": RecentStats: unable to find data in memory cache], [\"/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod17628b62_3c9d_4d0e_9c33_92dfdf518d4c.slice/crio-conmon-c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b.scope\": RecentStats: unable to find data in memory cache]"
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.693207 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/manilaef6c-account-delete-7mmwz"]
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.810376 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"]
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.812542 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" podUID="e9cf77c3-92e1-44dd-8390-be3e5845b011" containerName="manager" containerID="cri-o://fb0c5de87af8b6ad2c512285953b34ad44a6b93455ee84200dcb735f64300d9c" gracePeriod=10
Feb 02 13:14:34 crc kubenswrapper[4703]: I0202 13:14:34.934727 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" podUID="e9cf77c3-92e1-44dd-8390-be3e5845b011" containerName="manager" probeResult="failure" output="Get \"http://10.217.0.84:8081/readyz\": dial tcp 10.217.0.84:8081: connect: connection refused"
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.039614 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-index-tbn87"]
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.040065 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/manila-operator-index-tbn87" podUID="3daa09fb-f816-46c0-a43f-312554985188" containerName="registry-server" containerID="cri-o://60ea15301152b9f689dd5d9ce73c78ada802ad99c4bae753a875e04975e2360a" gracePeriod=30
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.058648 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"]
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.064352 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/6da8eb178e117ecdf8984f3af3db770d1434e90ec7fa02fda018706b6dm646s"]
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.199163 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0"
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323002 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-scripts\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323042 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cnhwf\" (UniqueName: \"kubernetes.io/projected/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-kube-api-access-cnhwf\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323105 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-etc-machine-id\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323138 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-var-lib-manila\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323167 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323192 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-ceph\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.323235 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data-custom\") pod \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\" (UID: \"17628b62-3c9d-4d0e-9c33-92dfdf518d4c\") "
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.324579 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.325465 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-var-lib-manila" (OuterVolumeSpecName: "var-lib-manila") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "var-lib-manila". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.329429 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-ceph" (OuterVolumeSpecName: "ceph") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "ceph". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.330442 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-scripts" (OuterVolumeSpecName: "scripts") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.330832 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.331037 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-kube-api-access-cnhwf" (OuterVolumeSpecName: "kube-api-access-cnhwf") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "kube-api-access-cnhwf". PluginName "kubernetes.io/projected", VolumeGidValue ""
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.365873 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz" event={"ID":"27ae1308-7fc6-4a91-92d7-d52fefac9b17","Type":"ContainerStarted","Data":"bcc6c65d82660bda38b69146c5e9a2d927b08144003f311a114f87e61b87f83f"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.365916 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz" event={"ID":"27ae1308-7fc6-4a91-92d7-d52fefac9b17","Type":"ContainerStarted","Data":"b5acaa45ccb7de92b1f21e682e0f40f1ecaf91b33b02fcada7d428da0748d39b"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.366378 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.370875 4703 generic.go:334] "Generic (PLEG): container finished" podID="3daa09fb-f816-46c0-a43f-312554985188" containerID="60ea15301152b9f689dd5d9ce73c78ada802ad99c4bae753a875e04975e2360a" exitCode=0 Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.370950 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-tbn87" event={"ID":"3daa09fb-f816-46c0-a43f-312554985188","Type":"ContainerDied","Data":"60ea15301152b9f689dd5d9ce73c78ada802ad99c4bae753a875e04975e2360a"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.379972 4703 generic.go:334] "Generic (PLEG): container finished" podID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerID="c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b" exitCode=0 Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.380005 4703 generic.go:334] "Generic (PLEG): container finished" podID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerID="96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d" exitCode=1 Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.380058 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"17628b62-3c9d-4d0e-9c33-92dfdf518d4c","Type":"ContainerDied","Data":"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.380088 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"17628b62-3c9d-4d0e-9c33-92dfdf518d4c","Type":"ContainerDied","Data":"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.380102 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-share-share1-0" event={"ID":"17628b62-3c9d-4d0e-9c33-92dfdf518d4c","Type":"ContainerDied","Data":"34be6171ba062a133e11de5acb1f7fb8887a4909f091531b277b76ca7aac310d"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.380119 4703 scope.go:117] "RemoveContainer" containerID="c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.380262 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-share-share1-0" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.382999 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz" podStartSLOduration=2.382982362 podStartE2EDuration="2.382982362s" podCreationTimestamp="2026-02-02 13:14:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:14:35.379566075 +0000 UTC m=+1402.394773609" watchObservedRunningTime="2026-02-02 13:14:35.382982362 +0000 UTC m=+1402.398189896" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.402406 4703 generic.go:334] "Generic (PLEG): container finished" podID="98006c61-e202-4f08-92f5-8805fb51d793" containerID="0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2" exitCode=0 Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.402472 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"98006c61-e202-4f08-92f5-8805fb51d793","Type":"ContainerDied","Data":"0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.405010 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" event={"ID":"6a995ed1-b4b5-43d3-91cf-2f461916e342","Type":"ContainerStarted","Data":"577887e8178672ae76d1175143f549e287d8a12ad99c51f5ad101e0260d6f4dd"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.405154 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" podUID="6a995ed1-b4b5-43d3-91cf-2f461916e342" containerName="manila-service-cleanup-n5b5h655" containerID="cri-o://577887e8178672ae76d1175143f549e287d8a12ad99c51f5ad101e0260d6f4dd" gracePeriod=30 Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.407914 4703 generic.go:334] "Generic (PLEG): container finished" podID="e9cf77c3-92e1-44dd-8390-be3e5845b011" containerID="fb0c5de87af8b6ad2c512285953b34ad44a6b93455ee84200dcb735f64300d9c" exitCode=0 Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.407954 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" event={"ID":"e9cf77c3-92e1-44dd-8390-be3e5845b011","Type":"ContainerDied","Data":"fb0c5de87af8b6ad2c512285953b34ad44a6b93455ee84200dcb735f64300d9c"} Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.408004 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.419241 4703 scope.go:117] "RemoveContainer" containerID="96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.429350 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.429383 4703 reconciler_common.go:293] "Volume detached for volume \"var-lib-manila\" (UniqueName: \"kubernetes.io/host-path/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-var-lib-manila\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.429392 4703 reconciler_common.go:293] "Volume detached for volume \"ceph\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-ceph\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.429401 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.429409 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.429417 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cnhwf\" (UniqueName: \"kubernetes.io/projected/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-kube-api-access-cnhwf\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.432616 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data" (OuterVolumeSpecName: "config-data") pod "17628b62-3c9d-4d0e-9c33-92dfdf518d4c" (UID: "17628b62-3c9d-4d0e-9c33-92dfdf518d4c"). InnerVolumeSpecName "config-data". 
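The pod_startup_latency_tracker entry above is plain arithmetic: podStartSLOduration is watchObservedRunningTime minus podCreationTimestamp. A sketch reproducing the 2.382982362s figure from the timestamps as printed; Go's monotonic-clock suffix ("m=+...") has to be stripped before parsing:

```go
package main

import (
	"fmt"
	"strings"
	"time"
)

// Layout matching timestamps like "2026-02-02 13:14:33 +0000 UTC";
// ".999999999" makes the fractional seconds optional.
const layout = "2006-01-02 15:04:05.999999999 -0700 MST"

func parse(ts string) time.Time {
	ts = strings.Split(ts, " m=")[0] // drop the monotonic reading
	t, err := time.Parse(layout, ts)
	if err != nil {
		panic(err)
	}
	return t
}

func main() {
	created := parse("2026-02-02 13:14:33 +0000 UTC")
	watched := parse("2026-02-02 13:14:35.382982362 +0000 UTC m=+1402.398189896")
	fmt.Println(watched.Sub(created)) // 2.382982362s, matching podStartSLOduration
}
```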
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.433239 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" podStartSLOduration=2.433224163 podStartE2EDuration="2.433224163s" podCreationTimestamp="2026-02-02 13:14:33 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:14:35.428864539 +0000 UTC m=+1402.444072083" watchObservedRunningTime="2026-02-02 13:14:35.433224163 +0000 UTC m=+1402.448431697" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.442433 4703 scope.go:117] "RemoveContainer" containerID="c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b" Feb 02 13:14:35 crc kubenswrapper[4703]: E0202 13:14:35.442922 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b\": container with ID starting with c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b not found: ID does not exist" containerID="c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.442969 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b"} err="failed to get container status \"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b\": rpc error: code = NotFound desc = could not find container \"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b\": container with ID starting with c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b not found: ID does not exist" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443002 4703 scope.go:117] "RemoveContainer" containerID="96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d" Feb 02 13:14:35 crc kubenswrapper[4703]: E0202 13:14:35.443393 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d\": container with ID starting with 96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d not found: ID does not exist" containerID="96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443420 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d"} err="failed to get container status \"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d\": rpc error: code = NotFound desc = could not find container \"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d\": container with ID starting with 96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d not found: ID does not exist" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443440 4703 scope.go:117] "RemoveContainer" containerID="c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443671 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b"} err="failed to get container 
status \"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b\": rpc error: code = NotFound desc = could not find container \"c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b\": container with ID starting with c9df8f9633fcee8de9aec9798ebedd921747e0854329906b0dfdb30de8162a0b not found: ID does not exist" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443695 4703 scope.go:117] "RemoveContainer" containerID="96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443958 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d"} err="failed to get container status \"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d\": rpc error: code = NotFound desc = could not find container \"96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d\": container with ID starting with 96b2827971d856377bbf216121c5e1b1b4b22f57d08f191fe35476ea6a9d7a0d not found: ID does not exist" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.443982 4703 scope.go:117] "RemoveContainer" containerID="fb0c5de87af8b6ad2c512285953b34ad44a6b93455ee84200dcb735f64300d9c" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.530411 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rmxz7\" (UniqueName: \"kubernetes.io/projected/e9cf77c3-92e1-44dd-8390-be3e5845b011-kube-api-access-rmxz7\") pod \"e9cf77c3-92e1-44dd-8390-be3e5845b011\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.530483 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-webhook-cert\") pod \"e9cf77c3-92e1-44dd-8390-be3e5845b011\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.530556 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-apiservice-cert\") pod \"e9cf77c3-92e1-44dd-8390-be3e5845b011\" (UID: \"e9cf77c3-92e1-44dd-8390-be3e5845b011\") " Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.530887 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/17628b62-3c9d-4d0e-9c33-92dfdf518d4c-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.534126 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "e9cf77c3-92e1-44dd-8390-be3e5845b011" (UID: "e9cf77c3-92e1-44dd-8390-be3e5845b011"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.536363 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "e9cf77c3-92e1-44dd-8390-be3e5845b011" (UID: "e9cf77c3-92e1-44dd-8390-be3e5845b011"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.539647 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e9cf77c3-92e1-44dd-8390-be3e5845b011-kube-api-access-rmxz7" (OuterVolumeSpecName: "kube-api-access-rmxz7") pod "e9cf77c3-92e1-44dd-8390-be3e5845b011" (UID: "e9cf77c3-92e1-44dd-8390-be3e5845b011"). InnerVolumeSpecName "kube-api-access-rmxz7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.625335 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/manila-operator-index-tbn87" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.632040 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rmxz7\" (UniqueName: \"kubernetes.io/projected/e9cf77c3-92e1-44dd-8390-be3e5845b011-kube-api-access-rmxz7\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.632069 4703 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.632081 4703 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/e9cf77c3-92e1-44dd-8390-be3e5845b011-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.723402 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.733200 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-9rz4k\" (UniqueName: \"kubernetes.io/projected/3daa09fb-f816-46c0-a43f-312554985188-kube-api-access-9rz4k\") pod \"3daa09fb-f816-46c0-a43f-312554985188\" (UID: \"3daa09fb-f816-46c0-a43f-312554985188\") " Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.739238 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3daa09fb-f816-46c0-a43f-312554985188-kube-api-access-9rz4k" (OuterVolumeSpecName: "kube-api-access-9rz4k") pod "3daa09fb-f816-46c0-a43f-312554985188" (UID: "3daa09fb-f816-46c0-a43f-312554985188"). InnerVolumeSpecName "kube-api-access-9rz4k". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.742349 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-share-share1-0"] Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.761086 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"] Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.765384 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/manila-operator-controller-manager-f55bcb75d-7r4kb"] Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.835114 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-9rz4k\" (UniqueName: \"kubernetes.io/projected/3daa09fb-f816-46c0-a43f-312554985188-kube-api-access-9rz4k\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.942356 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" path="/var/lib/kubelet/pods/17628b62-3c9d-4d0e-9c33-92dfdf518d4c/volumes" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.943140 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b77f27da-6dec-4980-87d9-7e31b34e413d" path="/var/lib/kubelet/pods/b77f27da-6dec-4980-87d9-7e31b34e413d/volumes" Feb 02 13:14:35 crc kubenswrapper[4703]: I0202 13:14:35.943912 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e9cf77c3-92e1-44dd-8390-be3e5845b011" path="/var/lib/kubelet/pods/e9cf77c3-92e1-44dd-8390-be3e5845b011/volumes" Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.417027 4703 generic.go:334] "Generic (PLEG): container finished" podID="27ae1308-7fc6-4a91-92d7-d52fefac9b17" containerID="bcc6c65d82660bda38b69146c5e9a2d927b08144003f311a114f87e61b87f83f" exitCode=0 Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.417098 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz" event={"ID":"27ae1308-7fc6-4a91-92d7-d52fefac9b17","Type":"ContainerDied","Data":"bcc6c65d82660bda38b69146c5e9a2d927b08144003f311a114f87e61b87f83f"} Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.418687 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/manila-operator-index-tbn87" event={"ID":"3daa09fb-f816-46c0-a43f-312554985188","Type":"ContainerDied","Data":"71cacafb1fc434deb6f2dc44591881a07c67ae31dbccce2c2191797362ec9c61"} Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.418718 4703 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.418807 4703 scope.go:117] "RemoveContainer" containerID="60ea15301152b9f689dd5d9ce73c78ada802ad99c4bae753a875e04975e2360a"
Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.455110 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/manila-operator-index-tbn87"]
Feb 02 13:14:36 crc kubenswrapper[4703]: I0202 13:14:36.461072 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/manila-operator-index-tbn87"]
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.317899 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="manila-kuttl-tests/manila-api-0" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api" probeResult="failure" output="Get \"http://10.217.0.109:8786/healthcheck\": read tcp 10.217.0.2:57576->10.217.0.109:8786: read: connection reset by peer"
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.479054 4703 generic.go:334] "Generic (PLEG): container finished" podID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerID="c58fed4bfa018c1c1011ece934454c7cb5c805e238808ad539766d6cfd7fc301" exitCode=0
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.479159 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308","Type":"ContainerDied","Data":"c58fed4bfa018c1c1011ece934454c7cb5c805e238808ad539766d6cfd7fc301"}
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.700996 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0"
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.799114 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz"
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.805908 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0"
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.860854 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data-custom\") pod \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.860906 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-etc-machine-id\") pod \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.860946 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-52sh7\" (UniqueName: \"kubernetes.io/projected/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-kube-api-access-52sh7\") pod \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.861023 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" (UID: "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.861045 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-scripts\") pod \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.861082 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data\") pod \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.861131 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-logs\") pod \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\" (UID: \"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.861478 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-etc-machine-id\") on node \"crc\" DevicePath \"\""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.861777 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-logs" (OuterVolumeSpecName: "logs") pod "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" (UID: "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308"). InnerVolumeSpecName "logs". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.866240 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" (UID: "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.866288 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-kube-api-access-52sh7" (OuterVolumeSpecName: "kube-api-access-52sh7") pod "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" (UID: "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308"). InnerVolumeSpecName "kube-api-access-52sh7". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.872427 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-scripts" (OuterVolumeSpecName: "scripts") pod "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" (UID: "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.893068 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data" (OuterVolumeSpecName: "config-data") pod "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" (UID: "fbf2e605-6ef5-4f1d-bea6-22cf05aa3308"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue ""
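The prober.go:107 entries above show HTTP readiness probes failing as the endpoints shut down ("connection refused", "connection reset by peer"). A minimal sketch of such a probe, with the /readyz endpoint simulated by a local test server rather than the manila or operator services from this log:

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
	"time"
)

// probe performs one HTTP readiness check; 2xx/3xx counts as ready,
// anything else (including transport errors) as a probe failure.
func probe(url string) error {
	client := &http.Client{Timeout: time.Second}
	resp, err := client.Get(url)
	if err != nil {
		return err // e.g. "connect: connection refused" once the server is gone
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 400 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	srv := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.WriteHeader(http.StatusOK)
	}))
	fmt.Println("probe while serving:", probe(srv.URL+"/readyz")) // <nil>
	srv.Close()
	fmt.Println("probe after shutdown:", probe(srv.URL+"/readyz")) // connection refused
}
```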
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.940338 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3daa09fb-f816-46c0-a43f-312554985188" path="/var/lib/kubelet/pods/3daa09fb-f816-46c0-a43f-312554985188/volumes"
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962032 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-scripts\") pod \"98006c61-e202-4f08-92f5-8805fb51d793\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962105 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data-custom\") pod \"98006c61-e202-4f08-92f5-8805fb51d793\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962136 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cspj4\" (UniqueName: \"kubernetes.io/projected/27ae1308-7fc6-4a91-92d7-d52fefac9b17-kube-api-access-cspj4\") pod \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962196 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27ae1308-7fc6-4a91-92d7-d52fefac9b17-operator-scripts\") pod \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\" (UID: \"27ae1308-7fc6-4a91-92d7-d52fefac9b17\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962241 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6skgg\" (UniqueName: \"kubernetes.io/projected/98006c61-e202-4f08-92f5-8805fb51d793-kube-api-access-6skgg\") pod \"98006c61-e202-4f08-92f5-8805fb51d793\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962336 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data\") pod \"98006c61-e202-4f08-92f5-8805fb51d793\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962404 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/98006c61-e202-4f08-92f5-8805fb51d793-etc-machine-id\") pod \"98006c61-e202-4f08-92f5-8805fb51d793\" (UID: \"98006c61-e202-4f08-92f5-8805fb51d793\") "
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962704 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/host-path/98006c61-e202-4f08-92f5-8805fb51d793-etc-machine-id" (OuterVolumeSpecName: "etc-machine-id") pod "98006c61-e202-4f08-92f5-8805fb51d793" (UID: "98006c61-e202-4f08-92f5-8805fb51d793"). InnerVolumeSpecName "etc-machine-id". PluginName "kubernetes.io/host-path", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962795 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-52sh7\" (UniqueName: \"kubernetes.io/projected/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-kube-api-access-52sh7\") on node \"crc\" DevicePath \"\""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962814 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-scripts\") on node \"crc\" DevicePath \"\""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962827 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data\") on node \"crc\" DevicePath \"\""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962838 4703 reconciler_common.go:293] "Volume detached for volume \"logs\" (UniqueName: \"kubernetes.io/empty-dir/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-logs\") on node \"crc\" DevicePath \"\""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.962850 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308-config-data-custom\") on node \"crc\" DevicePath \"\""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.963107 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/27ae1308-7fc6-4a91-92d7-d52fefac9b17-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "27ae1308-7fc6-4a91-92d7-d52fefac9b17" (UID: "27ae1308-7fc6-4a91-92d7-d52fefac9b17"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.965674 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-scripts" (OuterVolumeSpecName: "scripts") pod "98006c61-e202-4f08-92f5-8805fb51d793" (UID: "98006c61-e202-4f08-92f5-8805fb51d793"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.965729 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/27ae1308-7fc6-4a91-92d7-d52fefac9b17-kube-api-access-cspj4" (OuterVolumeSpecName: "kube-api-access-cspj4") pod "27ae1308-7fc6-4a91-92d7-d52fefac9b17" (UID: "27ae1308-7fc6-4a91-92d7-d52fefac9b17"). InnerVolumeSpecName "kube-api-access-cspj4". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.965855 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/98006c61-e202-4f08-92f5-8805fb51d793-kube-api-access-6skgg" (OuterVolumeSpecName: "kube-api-access-6skgg") pod "98006c61-e202-4f08-92f5-8805fb51d793" (UID: "98006c61-e202-4f08-92f5-8805fb51d793"). InnerVolumeSpecName "kube-api-access-6skgg". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:14:37 crc kubenswrapper[4703]: I0202 13:14:37.967136 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data-custom" (OuterVolumeSpecName: "config-data-custom") pod "98006c61-e202-4f08-92f5-8805fb51d793" (UID: "98006c61-e202-4f08-92f5-8805fb51d793"). InnerVolumeSpecName "config-data-custom". PluginName "kubernetes.io/secret", VolumeGidValue ""
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.023371 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data" (OuterVolumeSpecName: "config-data") pod "98006c61-e202-4f08-92f5-8805fb51d793" (UID: "98006c61-e202-4f08-92f5-8805fb51d793"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064210 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-custom\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data-custom\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064256 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cspj4\" (UniqueName: \"kubernetes.io/projected/27ae1308-7fc6-4a91-92d7-d52fefac9b17-kube-api-access-cspj4\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064289 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/27ae1308-7fc6-4a91-92d7-d52fefac9b17-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064304 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6skgg\" (UniqueName: \"kubernetes.io/projected/98006c61-e202-4f08-92f5-8805fb51d793-kube-api-access-6skgg\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064316 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064327 4703 reconciler_common.go:293] "Volume detached for volume \"etc-machine-id\" (UniqueName: \"kubernetes.io/host-path/98006c61-e202-4f08-92f5-8805fb51d793-etc-machine-id\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.064338 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/98006c61-e202-4f08-92f5-8805fb51d793-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.487575 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz" event={"ID":"27ae1308-7fc6-4a91-92d7-d52fefac9b17","Type":"ContainerDied","Data":"b5acaa45ccb7de92b1f21e682e0f40f1ecaf91b33b02fcada7d428da0748d39b"} Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.487909 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="b5acaa45ccb7de92b1f21e682e0f40f1ecaf91b33b02fcada7d428da0748d39b" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.487751 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manilaef6c-account-delete-7mmwz" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.491169 4703 generic.go:334] "Generic (PLEG): container finished" podID="98006c61-e202-4f08-92f5-8805fb51d793" containerID="1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa" exitCode=0 Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.491401 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-scheduler-0" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.491723 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"98006c61-e202-4f08-92f5-8805fb51d793","Type":"ContainerDied","Data":"1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa"} Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.492367 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-scheduler-0" event={"ID":"98006c61-e202-4f08-92f5-8805fb51d793","Type":"ContainerDied","Data":"d0293ab63c4cc6e133439ce5871ec54f9fbfdeb211c58ffe4c916328c22f4ee4"} Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.492432 4703 scope.go:117] "RemoveContainer" containerID="0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.495023 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-api-0" event={"ID":"fbf2e605-6ef5-4f1d-bea6-22cf05aa3308","Type":"ContainerDied","Data":"3472430197aa240ddb4c648c8e6172fc5e01ebb921a592ab51f5b9f76ec7f85b"} Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.495108 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/manila-api-0" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.529805 4703 scope.go:117] "RemoveContainer" containerID="1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.532760 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.542054 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-api-0"] Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.549356 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.556897 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-scheduler-0"] Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.559792 4703 scope.go:117] "RemoveContainer" containerID="0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2" Feb 02 13:14:38 crc kubenswrapper[4703]: E0202 13:14:38.560149 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2\": container with ID starting with 0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2 not found: ID does not exist" containerID="0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.560191 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2"} err="failed to get container status \"0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2\": rpc error: code = NotFound desc = could not find container \"0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2\": container with ID starting with 0e0492a2717b2828d791cf0ded6635d9c487c5fe4b6526da0ad43d28b72083f2 not found: ID does not exist" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.560210 4703 scope.go:117] "RemoveContainer" 
containerID="1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa" Feb 02 13:14:38 crc kubenswrapper[4703]: E0202 13:14:38.560524 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa\": container with ID starting with 1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa not found: ID does not exist" containerID="1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.560552 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa"} err="failed to get container status \"1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa\": rpc error: code = NotFound desc = could not find container \"1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa\": container with ID starting with 1abefe1bb9985b1f5f8213e58985f21ff20eaf1b12c9ca225e4a5bbfa326a6aa not found: ID does not exist" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.560571 4703 scope.go:117] "RemoveContainer" containerID="c58fed4bfa018c1c1011ece934454c7cb5c805e238808ad539766d6cfd7fc301" Feb 02 13:14:38 crc kubenswrapper[4703]: I0202 13:14:38.589577 4703 scope.go:117] "RemoveContainer" containerID="6cdd834a2f2c5cbd64c9a55a08c30fd187e352c8b7c4740f0e94f7c1fbcc0528" Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.013520 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-db-create-6f7lr"] Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.023137 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-db-create-6f7lr"] Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.033022 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manilaef6c-account-delete-7mmwz"] Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.047262 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-ef6c-account-create-update-sptnz"] Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.055869 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manilaef6c-account-delete-7mmwz"] Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.066912 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-ef6c-account-create-update-sptnz"] Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.941519 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="27ae1308-7fc6-4a91-92d7-d52fefac9b17" path="/var/lib/kubelet/pods/27ae1308-7fc6-4a91-92d7-d52fefac9b17/volumes" Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.942353 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="98006c61-e202-4f08-92f5-8805fb51d793" path="/var/lib/kubelet/pods/98006c61-e202-4f08-92f5-8805fb51d793/volumes" Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.943103 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ab721f7b-8474-4f1d-ad73-98421e2d1215" path="/var/lib/kubelet/pods/ab721f7b-8474-4f1d-ad73-98421e2d1215/volumes" Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.944073 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f02c2f52-b256-449b-9313-40f8ddfd8df7" path="/var/lib/kubelet/pods/f02c2f52-b256-449b-9313-40f8ddfd8df7/volumes" Feb 
Feb 02 13:14:39 crc kubenswrapper[4703]: I0202 13:14:39.944654 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" path="/var/lib/kubelet/pods/fbf2e605-6ef5-4f1d-bea6-22cf05aa3308/volumes"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.150905 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-t26td"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.155956 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-6sl92"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.160714 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-bootstrap-t26td"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.165633 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-db-sync-6sl92"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.170712 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["manila-kuttl-tests/keystone8b43-account-delete-w7gf2"]
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.170998 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="27ae1308-7fc6-4a91-92d7-d52fefac9b17" containerName="mariadb-account-delete"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171020 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="27ae1308-7fc6-4a91-92d7-d52fefac9b17" containerName="mariadb-account-delete"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171033 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="probe"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171043 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="probe"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171054 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e9cf77c3-92e1-44dd-8390-be3e5845b011" containerName="manager"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171062 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e9cf77c3-92e1-44dd-8390-be3e5845b011" containerName="manager"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171073 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="manila-scheduler"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171080 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="manila-scheduler"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171096 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api-log"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171104 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api-log"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171118 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="probe"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171126 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="probe"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171137 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3daa09fb-f816-46c0-a43f-312554985188" containerName="registry-server"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171144 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3daa09fb-f816-46c0-a43f-312554985188" containerName="registry-server"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171156 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="manila-share"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171163 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="manila-share"
Feb 02 13:14:41 crc kubenswrapper[4703]: E0202 13:14:41.171175 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171182 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171340 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="e9cf77c3-92e1-44dd-8390-be3e5845b011" containerName="manager"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171359 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="27ae1308-7fc6-4a91-92d7-d52fefac9b17" containerName="mariadb-account-delete"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171381 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="probe"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171393 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="98006c61-e202-4f08-92f5-8805fb51d793" containerName="manila-scheduler"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171406 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="3daa09fb-f816-46c0-a43f-312554985188" containerName="registry-server"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171417 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="probe"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171426 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api-log"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171435 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="fbf2e605-6ef5-4f1d-bea6-22cf05aa3308" containerName="manila-api"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.171444 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="17628b62-3c9d-4d0e-9c33-92dfdf518d4c" containerName="manila-share"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.201241 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-5999f99c86-s96tg"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.201581 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" podUID="199d616b-b578-48ae-aad7-e4e2ed6f3d31" containerName="keystone-api" containerID="cri-o://d2b8dbed2bc9f6377b0ce77e910f11799b70b4b86d5a0cc9bf494f36329fa105" gracePeriod=30
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.201670 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.208143 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone8b43-account-delete-w7gf2"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.317100 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-cmdww\" (UniqueName: \"kubernetes.io/projected/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-kube-api-access-cmdww\") pod \"keystone8b43-account-delete-w7gf2\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.317186 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts\") pod \"keystone8b43-account-delete-w7gf2\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.418481 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts\") pod \"keystone8b43-account-delete-w7gf2\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.418580 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-cmdww\" (UniqueName: \"kubernetes.io/projected/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-kube-api-access-cmdww\") pod \"keystone8b43-account-delete-w7gf2\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.419459 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts\") pod \"keystone8b43-account-delete-w7gf2\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.440624 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-cmdww\" (UniqueName: \"kubernetes.io/projected/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-kube-api-access-cmdww\") pod \"keystone8b43-account-delete-w7gf2\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.537744 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.768396 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/keystone8b43-account-delete-w7gf2"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.794900 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/root-account-create-update-l9fxz"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.829358 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/root-account-create-update-l9fxz"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.837327 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.856892 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.865220 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"]
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.943331 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5f15ab71-7ffd-4ff7-83db-9a1bd624cb88" path="/var/lib/kubelet/pods/5f15ab71-7ffd-4ff7-83db-9a1bd624cb88/volumes"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.944112 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="77c94813-39fb-452c-b9f6-51a00bbe7e4e" path="/var/lib/kubelet/pods/77c94813-39fb-452c-b9f6-51a00bbe7e4e/volumes"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.944663 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c81d425d-d6cf-4521-a429-8febc3442348" path="/var/lib/kubelet/pods/c81d425d-d6cf-4521-a429-8febc3442348/volumes"
Feb 02 13:14:41 crc kubenswrapper[4703]: I0202 13:14:41.997127 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/openstack-galera-2" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="galera" containerID="cri-o://46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b" gracePeriod=30
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.483343 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/memcached-0"]
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.483565 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/memcached-0" podUID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" containerName="memcached" containerID="cri-o://1374f08e88226a477450c328cf22a3740fd88e1d8283aeb0727ff981782b3580" gracePeriod=30
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.529453 4703 generic.go:334] "Generic (PLEG): container finished" podID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerID="6dd55a9f53965fdb3c1c5dac55e55bd1ecc8525ef0b89c318a00e2bb2512d42e" exitCode=1
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.529545 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" event={"ID":"4f35ec6a-f298-4049-a165-04dbcd1ce6b7","Type":"ContainerDied","Data":"6dd55a9f53965fdb3c1c5dac55e55bd1ecc8525ef0b89c318a00e2bb2512d42e"}
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.529941 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" event={"ID":"4f35ec6a-f298-4049-a165-04dbcd1ce6b7","Type":"ContainerStarted","Data":"cce47ae80ad2f194ab77ede7555a55b2a1a2021965f549b9ab97ab7bce3441a1"}
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.529958 4703 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" secret="" err="secret \"galera-openstack-dockercfg-kxjvv\" not found"
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.530070 4703 scope.go:117] "RemoveContainer" containerID="6dd55a9f53965fdb3c1c5dac55e55bd1ecc8525ef0b89c318a00e2bb2512d42e"
Feb 02 13:14:42 crc kubenswrapper[4703]: E0202 13:14:42.639627 4703 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Feb 02 13:14:42 crc kubenswrapper[4703]: E0202 13:14:42.639723 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts podName:4f35ec6a-f298-4049-a165-04dbcd1ce6b7 nodeName:}" failed. No retries permitted until 2026-02-02 13:14:43.139704597 +0000 UTC m=+1410.154912131 (durationBeforeRetry 500ms). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts") pod "keystone8b43-account-delete-w7gf2" (UID: "4f35ec6a-f298-4049-a165-04dbcd1ce6b7") : configmap "openstack-scripts" not found
Feb 02 13:14:42 crc kubenswrapper[4703]: I0202 13:14:42.852897 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"]
Feb 02 13:14:43 crc kubenswrapper[4703]: E0202 13:14:43.164173 4703 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found
Feb 02 13:14:43 crc kubenswrapper[4703]: E0202 13:14:43.164549 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts podName:4f35ec6a-f298-4049-a165-04dbcd1ce6b7 nodeName:}" failed. No retries permitted until 2026-02-02 13:14:44.164531485 +0000 UTC m=+1411.179739019 (durationBeforeRetry 1s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts") pod "keystone8b43-account-delete-w7gf2" (UID: "4f35ec6a-f298-4049-a165-04dbcd1ce6b7") : configmap "openstack-scripts" not found
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.274705 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.366762 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") pod \"e923beff-a23d-4f99-a05f-f48d59515e7e\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.366823 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-operator-scripts\") pod \"e923beff-a23d-4f99-a05f-f48d59515e7e\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.366889 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-default\") pod \"e923beff-a23d-4f99-a05f-f48d59515e7e\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.366977 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-6h57d\" (UniqueName: \"kubernetes.io/projected/e923beff-a23d-4f99-a05f-f48d59515e7e-kube-api-access-6h57d\") pod \"e923beff-a23d-4f99-a05f-f48d59515e7e\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.367020 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-generated\") pod \"e923beff-a23d-4f99-a05f-f48d59515e7e\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.367047 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-kolla-config\") pod \"e923beff-a23d-4f99-a05f-f48d59515e7e\" (UID: \"e923beff-a23d-4f99-a05f-f48d59515e7e\") " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.367550 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "e923beff-a23d-4f99-a05f-f48d59515e7e" (UID: "e923beff-a23d-4f99-a05f-f48d59515e7e"). InnerVolumeSpecName "config-data-default". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.367581 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "e923beff-a23d-4f99-a05f-f48d59515e7e" (UID: "e923beff-a23d-4f99-a05f-f48d59515e7e"). InnerVolumeSpecName "kolla-config". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.367781 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "e923beff-a23d-4f99-a05f-f48d59515e7e" (UID: "e923beff-a23d-4f99-a05f-f48d59515e7e"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.368043 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e923beff-a23d-4f99-a05f-f48d59515e7e" (UID: "e923beff-a23d-4f99-a05f-f48d59515e7e"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.386016 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage02-crc" (OuterVolumeSpecName: "mysql-db") pod "e923beff-a23d-4f99-a05f-f48d59515e7e" (UID: "e923beff-a23d-4f99-a05f-f48d59515e7e"). InnerVolumeSpecName "local-storage02-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.389427 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e923beff-a23d-4f99-a05f-f48d59515e7e-kube-api-access-6h57d" (OuterVolumeSpecName: "kube-api-access-6h57d") pod "e923beff-a23d-4f99-a05f-f48d59515e7e" (UID: "e923beff-a23d-4f99-a05f-f48d59515e7e"). InnerVolumeSpecName "kube-api-access-6h57d". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.468545 4703 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" " Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.468588 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.468603 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.468616 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-6h57d\" (UniqueName: \"kubernetes.io/projected/e923beff-a23d-4f99-a05f-f48d59515e7e-kube-api-access-6h57d\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.468628 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e923beff-a23d-4f99-a05f-f48d59515e7e-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.468640 4703 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e923beff-a23d-4f99-a05f-f48d59515e7e-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.482192 4703 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage02-crc" (UniqueName: "kubernetes.io/local-volume/local-storage02-crc") on node "crc" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.539182 4703 generic.go:334] "Generic (PLEG): container finished" podID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerID="0d62260cd950afce613e86a1e24e5fc6256dd3698f0229f3abf3adb1c1c1b8fc" exitCode=1 Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.539252 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" event={"ID":"4f35ec6a-f298-4049-a165-04dbcd1ce6b7","Type":"ContainerDied","Data":"0d62260cd950afce613e86a1e24e5fc6256dd3698f0229f3abf3adb1c1c1b8fc"} Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.539307 4703 scope.go:117] "RemoveContainer" containerID="6dd55a9f53965fdb3c1c5dac55e55bd1ecc8525ef0b89c318a00e2bb2512d42e" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.539822 4703 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." 
pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" secret="" err="secret \"galera-openstack-dockercfg-kxjvv\" not found" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.539854 4703 scope.go:117] "RemoveContainer" containerID="0d62260cd950afce613e86a1e24e5fc6256dd3698f0229f3abf3adb1c1c1b8fc" Feb 02 13:14:43 crc kubenswrapper[4703]: E0202 13:14:43.540149 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone8b43-account-delete-w7gf2_manila-kuttl-tests(4f35ec6a-f298-4049-a165-04dbcd1ce6b7)\"" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.543955 4703 generic.go:334] "Generic (PLEG): container finished" podID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" containerID="1374f08e88226a477450c328cf22a3740fd88e1d8283aeb0727ff981782b3580" exitCode=0 Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.544025 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e","Type":"ContainerDied","Data":"1374f08e88226a477450c328cf22a3740fd88e1d8283aeb0727ff981782b3580"} Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.545819 4703 generic.go:334] "Generic (PLEG): container finished" podID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerID="46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b" exitCode=0 Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.546133 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-2" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.547817 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"e923beff-a23d-4f99-a05f-f48d59515e7e","Type":"ContainerDied","Data":"46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b"} Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.547870 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-2" event={"ID":"e923beff-a23d-4f99-a05f-f48d59515e7e","Type":"ContainerDied","Data":"9ff581dd16a21a18c7456e0429f48de220898260ff4ff5192c59b7760535ee69"} Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.570007 4703 reconciler_common.go:293] "Volume detached for volume \"local-storage02-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage02-crc\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.597320 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.600552 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/rabbitmq-server-0" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerName="rabbitmq" containerID="cri-o://9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99" gracePeriod=604800 Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.604477 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/openstack-galera-2"] Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.642427 4703 scope.go:117] "RemoveContainer" containerID="46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b" Feb 02 13:14:43 crc 
kubenswrapper[4703]: I0202 13:14:43.667600 4703 scope.go:117] "RemoveContainer" containerID="951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.691951 4703 scope.go:117] "RemoveContainer" containerID="46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b" Feb 02 13:14:43 crc kubenswrapper[4703]: E0202 13:14:43.692642 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b\": container with ID starting with 46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b not found: ID does not exist" containerID="46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.692683 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b"} err="failed to get container status \"46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b\": rpc error: code = NotFound desc = could not find container \"46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b\": container with ID starting with 46a41d2b8a61616c895b9e314a191c99290fd36a224b18adec4976daa1dd638b not found: ID does not exist" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.692712 4703 scope.go:117] "RemoveContainer" containerID="951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb" Feb 02 13:14:43 crc kubenswrapper[4703]: E0202 13:14:43.692976 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb\": container with ID starting with 951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb not found: ID does not exist" containerID="951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.693001 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb"} err="failed to get container status \"951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb\": rpc error: code = NotFound desc = could not find container \"951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb\": container with ID starting with 951b42378f3f5b7254b61a2a032eed0860da01fad8e44f26b2c24bb5586bf2fb not found: ID does not exist" Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.787638 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/ceph"] Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.787853 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/ceph" podUID="4460b73f-1e46-424f-896a-64e152c5976c" containerName="ceph" containerID="cri-o://01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f" gracePeriod=30 Feb 02 13:14:43 crc kubenswrapper[4703]: I0202 13:14:43.956867 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" path="/var/lib/kubelet/pods/e923beff-a23d-4f99-a05f-f48d59515e7e/volumes" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.027784 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/openstack-galera-1" 
podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerName="galera" containerID="cri-o://01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed" gracePeriod=28 Feb 02 13:14:44 crc kubenswrapper[4703]: E0202 13:14:44.178301 4703 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Feb 02 13:14:44 crc kubenswrapper[4703]: E0202 13:14:44.178366 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts podName:4f35ec6a-f298-4049-a165-04dbcd1ce6b7 nodeName:}" failed. No retries permitted until 2026-02-02 13:14:46.178350451 +0000 UTC m=+1413.193557985 (durationBeforeRetry 2s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts") pod "keystone8b43-account-delete-w7gf2" (UID: "4f35ec6a-f298-4049-a165-04dbcd1ce6b7") : configmap "openstack-scripts" not found Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.202763 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/memcached-0" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.280042 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kolla-config\") pod \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.280087 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-config-data\") pod \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.280149 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rlk96\" (UniqueName: \"kubernetes.io/projected/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kube-api-access-rlk96\") pod \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\" (UID: \"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.280775 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-config-data" (OuterVolumeSpecName: "config-data") pod "85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" (UID: "85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.280971 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" (UID: "85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.285119 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kube-api-access-rlk96" (OuterVolumeSpecName: "kube-api-access-rlk96") pod "85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" (UID: "85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e"). InnerVolumeSpecName "kube-api-access-rlk96". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.382100 4703 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.382164 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/configmap/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.382180 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rlk96\" (UniqueName: \"kubernetes.io/projected/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e-kube-api-access-rlk96\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.557666 4703 generic.go:334] "Generic (PLEG): container finished" podID="199d616b-b578-48ae-aad7-e4e2ed6f3d31" containerID="d2b8dbed2bc9f6377b0ce77e910f11799b70b4b86d5a0cc9bf494f36329fa105" exitCode=0 Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.557762 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" event={"ID":"199d616b-b578-48ae-aad7-e4e2ed6f3d31","Type":"ContainerDied","Data":"d2b8dbed2bc9f6377b0ce77e910f11799b70b4b86d5a0cc9bf494f36329fa105"} Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.560075 4703 kubelet_pods.go:1007] "Unable to retrieve pull secret, the image pull may not succeed." pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" secret="" err="secret \"galera-openstack-dockercfg-kxjvv\" not found" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.560127 4703 scope.go:117] "RemoveContainer" containerID="0d62260cd950afce613e86a1e24e5fc6256dd3698f0229f3abf3adb1c1c1b8fc" Feb 02 13:14:44 crc kubenswrapper[4703]: E0202 13:14:44.560421 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"mariadb-account-delete\" with CrashLoopBackOff: \"back-off 10s restarting failed container=mariadb-account-delete pod=keystone8b43-account-delete-w7gf2_manila-kuttl-tests(4f35ec6a-f298-4049-a165-04dbcd1ce6b7)\"" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.561876 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/memcached-0" event={"ID":"85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e","Type":"ContainerDied","Data":"cf7ef50fdb91b61e86e04120438f7122bf7bbf5f7c9e2b488cf89f0d0f7c1879"} Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.561918 4703 scope.go:117] "RemoveContainer" containerID="1374f08e88226a477450c328cf22a3740fd88e1d8283aeb0727ff981782b3580" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.562011 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/memcached-0" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.631398 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/memcached-0"] Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.636819 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/memcached-0"] Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.722827 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.786778 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-config-data\") pod \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.786834 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-mf6kk\" (UniqueName: \"kubernetes.io/projected/199d616b-b578-48ae-aad7-e4e2ed6f3d31-kube-api-access-mf6kk\") pod \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.786853 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-fernet-keys\") pod \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.786879 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-scripts\") pod \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.786905 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-credential-keys\") pod \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\" (UID: \"199d616b-b578-48ae-aad7-e4e2ed6f3d31\") " Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.791558 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-credential-keys" (OuterVolumeSpecName: "credential-keys") pod "199d616b-b578-48ae-aad7-e4e2ed6f3d31" (UID: "199d616b-b578-48ae-aad7-e4e2ed6f3d31"). InnerVolumeSpecName "credential-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.792515 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-scripts" (OuterVolumeSpecName: "scripts") pod "199d616b-b578-48ae-aad7-e4e2ed6f3d31" (UID: "199d616b-b578-48ae-aad7-e4e2ed6f3d31"). InnerVolumeSpecName "scripts". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.794423 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/199d616b-b578-48ae-aad7-e4e2ed6f3d31-kube-api-access-mf6kk" (OuterVolumeSpecName: "kube-api-access-mf6kk") pod "199d616b-b578-48ae-aad7-e4e2ed6f3d31" (UID: "199d616b-b578-48ae-aad7-e4e2ed6f3d31"). InnerVolumeSpecName "kube-api-access-mf6kk". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.808196 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-config-data" (OuterVolumeSpecName: "config-data") pod "199d616b-b578-48ae-aad7-e4e2ed6f3d31" (UID: "199d616b-b578-48ae-aad7-e4e2ed6f3d31"). InnerVolumeSpecName "config-data". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.811522 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-fernet-keys" (OuterVolumeSpecName: "fernet-keys") pod "199d616b-b578-48ae-aad7-e4e2ed6f3d31" (UID: "199d616b-b578-48ae-aad7-e4e2ed6f3d31"). InnerVolumeSpecName "fernet-keys". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.888391 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.888767 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-mf6kk\" (UniqueName: \"kubernetes.io/projected/199d616b-b578-48ae-aad7-e4e2ed6f3d31-kube-api-access-mf6kk\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.888805 4703 reconciler_common.go:293] "Volume detached for volume \"fernet-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-fernet-keys\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.888821 4703 reconciler_common.go:293] "Volume detached for volume \"scripts\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:44 crc kubenswrapper[4703]: I0202 13:14:44.888833 4703 reconciler_common.go:293] "Volume detached for volume \"credential-keys\" (UniqueName: \"kubernetes.io/secret/199d616b-b578-48ae-aad7-e4e2ed6f3d31-credential-keys\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.050717 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192056 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-erlang-cookie\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192104 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-h9hp6\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-kube-api-access-h9hp6\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192134 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-confd\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192173 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-plugins\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192206 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd5038e-d740-437c-8451-5e31dd2b2d10-plugins-conf\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192263 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd5038e-d740-437c-8451-5e31dd2b2d10-pod-info\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192413 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"persistence\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.192450 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd5038e-d740-437c-8451-5e31dd2b2d10-erlang-cookie-secret\") pod \"1cd5038e-d740-437c-8451-5e31dd2b2d10\" (UID: \"1cd5038e-d740-437c-8451-5e31dd2b2d10\") " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.193245 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/1cd5038e-d740-437c-8451-5e31dd2b2d10-plugins-conf" (OuterVolumeSpecName: "plugins-conf") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "plugins-conf". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.193405 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-erlang-cookie" (OuterVolumeSpecName: "rabbitmq-erlang-cookie") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "rabbitmq-erlang-cookie". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.193420 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-plugins" (OuterVolumeSpecName: "rabbitmq-plugins") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "rabbitmq-plugins". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.196410 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-kube-api-access-h9hp6" (OuterVolumeSpecName: "kube-api-access-h9hp6") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "kube-api-access-h9hp6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.196528 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1cd5038e-d740-437c-8451-5e31dd2b2d10-erlang-cookie-secret" (OuterVolumeSpecName: "erlang-cookie-secret") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "erlang-cookie-secret". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.197397 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/downward-api/1cd5038e-d740-437c-8451-5e31dd2b2d10-pod-info" (OuterVolumeSpecName: "pod-info") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "pod-info". PluginName "kubernetes.io/downward-api", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.206123 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01" (OuterVolumeSpecName: "persistence") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01". PluginName "kubernetes.io/csi", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.251556 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-confd" (OuterVolumeSpecName: "rabbitmq-confd") pod "1cd5038e-d740-437c-8451-5e31dd2b2d10" (UID: "1cd5038e-d740-437c-8451-5e31dd2b2d10"). InnerVolumeSpecName "rabbitmq-confd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294142 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-h9hp6\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-kube-api-access-h9hp6\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294191 4703 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-erlang-cookie\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-erlang-cookie\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294205 4703 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-confd\" (UniqueName: \"kubernetes.io/projected/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-confd\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294218 4703 reconciler_common.go:293] "Volume detached for volume \"rabbitmq-plugins\" (UniqueName: \"kubernetes.io/empty-dir/1cd5038e-d740-437c-8451-5e31dd2b2d10-rabbitmq-plugins\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294234 4703 reconciler_common.go:293] "Volume detached for volume \"plugins-conf\" (UniqueName: \"kubernetes.io/configmap/1cd5038e-d740-437c-8451-5e31dd2b2d10-plugins-conf\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294245 4703 reconciler_common.go:293] "Volume detached for volume \"pod-info\" (UniqueName: \"kubernetes.io/downward-api/1cd5038e-d740-437c-8451-5e31dd2b2d10-pod-info\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294308 4703 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") on node \"crc\" " Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.294324 4703 reconciler_common.go:293] "Volume detached for volume \"erlang-cookie-secret\" (UniqueName: \"kubernetes.io/secret/1cd5038e-d740-437c-8451-5e31dd2b2d10-erlang-cookie-secret\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.311223 4703 csi_attacher.go:630] kubernetes.io/csi: attacher.UnmountDevice STAGE_UNSTAGE_VOLUME capability not set. Skipping UnmountDevice... Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.311446 4703 operation_generator.go:917] UnmountDevice succeeded for volume "pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01" (UniqueName: "kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01") on node "crc" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.395409 4703 reconciler_common.go:293] "Volume detached for volume \"pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\" (UniqueName: \"kubernetes.io/csi/kubevirt.io.hostpath-provisioner^pvc-2945eb8e-b1fb-4029-9304-cc5ea3c50d01\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.571477 4703 generic.go:334] "Generic (PLEG): container finished" podID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerID="9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99" exitCode=0 Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.571611 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/rabbitmq-server-0" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.571599 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"1cd5038e-d740-437c-8451-5e31dd2b2d10","Type":"ContainerDied","Data":"9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99"} Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.571692 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/rabbitmq-server-0" event={"ID":"1cd5038e-d740-437c-8451-5e31dd2b2d10","Type":"ContainerDied","Data":"28cec77f64946aee73adc51c1313afba8326ed75e495f658c119be4698610569"} Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.571719 4703 scope.go:117] "RemoveContainer" containerID="9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.576551 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" event={"ID":"199d616b-b578-48ae-aad7-e4e2ed6f3d31","Type":"ContainerDied","Data":"0fd4b74d0f263d25815c2ae237ccb59ba4f1a30bd5aca7687ea5a3f965dc1c67"} Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.576919 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone-5999f99c86-s96tg" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.634554 4703 scope.go:117] "RemoveContainer" containerID="268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.647780 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-5999f99c86-s96tg"] Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.670432 4703 scope.go:117] "RemoveContainer" containerID="9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99" Feb 02 13:14:45 crc kubenswrapper[4703]: E0202 13:14:45.671160 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99\": container with ID starting with 9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99 not found: ID does not exist" containerID="9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.671260 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99"} err="failed to get container status \"9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99\": rpc error: code = NotFound desc = could not find container \"9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99\": container with ID starting with 9576f775fb2ab98026342364f00ca052ffdb84ce70f1786d5752be3a0cc35b99 not found: ID does not exist" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.671531 4703 scope.go:117] "RemoveContainer" containerID="268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.671757 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-5999f99c86-s96tg"] Feb 02 13:14:45 crc kubenswrapper[4703]: E0202 13:14:45.672148 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container 
\"268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c\": container with ID starting with 268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c not found: ID does not exist" containerID="268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.672337 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c"} err="failed to get container status \"268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c\": rpc error: code = NotFound desc = could not find container \"268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c\": container with ID starting with 268c9484e9246cb25dbe7931e973181acd0009bdff7d7ae07669d79e2bb3553c not found: ID does not exist" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.672404 4703 scope.go:117] "RemoveContainer" containerID="d2b8dbed2bc9f6377b0ce77e910f11799b70b4b86d5a0cc9bf494f36329fa105" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.679720 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.683923 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/rabbitmq-server-0"] Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.941460 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="199d616b-b578-48ae-aad7-e4e2ed6f3d31" path="/var/lib/kubelet/pods/199d616b-b578-48ae-aad7-e4e2ed6f3d31/volumes" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.942170 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" path="/var/lib/kubelet/pods/1cd5038e-d740-437c-8451-5e31dd2b2d10/volumes" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.942687 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" path="/var/lib/kubelet/pods/85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e/volumes" Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.985131 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:14:45 crc kubenswrapper[4703]: I0202 13:14:45.985201 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.072743 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="manila-kuttl-tests/openstack-galera-0" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="galera" containerID="cri-o://b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" gracePeriod=26 Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.186441 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-db-create-pqgbn"] Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.197007 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" 
pods=["manila-kuttl-tests/keystone-db-create-pqgbn"] Feb 02 13:14:46 crc kubenswrapper[4703]: E0202 13:14:46.210154 4703 configmap.go:193] Couldn't get configMap manila-kuttl-tests/openstack-scripts: configmap "openstack-scripts" not found Feb 02 13:14:46 crc kubenswrapper[4703]: E0202 13:14:46.210325 4703 nestedpendingoperations.go:348] Operation for "{volumeName:kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts podName:4f35ec6a-f298-4049-a165-04dbcd1ce6b7 nodeName:}" failed. No retries permitted until 2026-02-02 13:14:50.210296294 +0000 UTC m=+1417.225503828 (durationBeforeRetry 4s). Error: MountVolume.SetUp failed for volume "operator-scripts" (UniqueName: "kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts") pod "keystone8b43-account-delete-w7gf2" (UID: "4f35ec6a-f298-4049-a165-04dbcd1ce6b7") : configmap "openstack-scripts" not found Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.212212 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone8b43-account-delete-w7gf2"] Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.226474 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l"] Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.233503 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone-8b43-account-create-update-8dr7l"] Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.445545 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.585953 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" event={"ID":"4f35ec6a-f298-4049-a165-04dbcd1ce6b7","Type":"ContainerDied","Data":"cce47ae80ad2f194ab77ede7555a55b2a1a2021965f549b9ab97ab7bce3441a1"} Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.586023 4703 scope.go:117] "RemoveContainer" containerID="0d62260cd950afce613e86a1e24e5fc6256dd3698f0229f3abf3adb1c1c1b8fc" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.585973 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/keystone8b43-account-delete-w7gf2" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.615402 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts\") pod \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.616130 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-cmdww\" (UniqueName: \"kubernetes.io/projected/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-kube-api-access-cmdww\") pod \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\" (UID: \"4f35ec6a-f298-4049-a165-04dbcd1ce6b7\") " Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.621644 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-kube-api-access-cmdww" (OuterVolumeSpecName: "kube-api-access-cmdww") pod "4f35ec6a-f298-4049-a165-04dbcd1ce6b7" (UID: "4f35ec6a-f298-4049-a165-04dbcd1ce6b7"). InnerVolumeSpecName "kube-api-access-cmdww". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.717631 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-cmdww\" (UniqueName: \"kubernetes.io/projected/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-kube-api-access-cmdww\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.772152 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "4f35ec6a-f298-4049-a165-04dbcd1ce6b7" (UID: "4f35ec6a-f298-4049-a165-04dbcd1ce6b7"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.819880 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/4f35ec6a-f298-4049-a165-04dbcd1ce6b7-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.919082 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/keystone8b43-account-delete-w7gf2"] Feb 02 13:14:46 crc kubenswrapper[4703]: I0202 13:14:46.923901 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/keystone8b43-account-delete-w7gf2"] Feb 02 13:14:47 crc kubenswrapper[4703]: I0202 13:14:47.943770 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" path="/var/lib/kubelet/pods/4f35ec6a-f298-4049-a165-04dbcd1ce6b7/volumes" Feb 02 13:14:47 crc kubenswrapper[4703]: I0202 13:14:47.944747 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="aa79b809-2747-49db-bc5f-995a0b69ac29" path="/var/lib/kubelet/pods/aa79b809-2747-49db-bc5f-995a0b69ac29/volumes" Feb 02 13:14:47 crc kubenswrapper[4703]: I0202 13:14:47.945469 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ea4ff59a-3aba-4e33-9be4-be34ed09f4cb" path="/var/lib/kubelet/pods/ea4ff59a-3aba-4e33-9be4-be34ed09f4cb/volumes" Feb 02 13:14:47 crc kubenswrapper[4703]: I0202 13:14:47.998776 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.094197 4703 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.095796 4703 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.097137 4703 log.go:32] "ExecSync cmd from runtime service failed" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" cmd=["/bin/bash","/var/lib/operator-scripts/mysql_probe.sh","readiness"] Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.097195 4703 prober.go:104] "Probe errored" err="rpc error: code = Unknown desc = command error: cannot register an exec PID: container is stopping, stdout: , stderr: , exit code -1" probeType="Readiness" pod="manila-kuttl-tests/openstack-galera-0" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="galera" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.136898 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-default\") pod \"e5582b3d-ce11-478d-b841-587e8d50dcd9\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.137172 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") pod \"e5582b3d-ce11-478d-b841-587e8d50dcd9\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.137256 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "e5582b3d-ce11-478d-b841-587e8d50dcd9" (UID: "e5582b3d-ce11-478d-b841-587e8d50dcd9"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.137351 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-p2nzd\" (UniqueName: \"kubernetes.io/projected/e5582b3d-ce11-478d-b841-587e8d50dcd9-kube-api-access-p2nzd\") pod \"e5582b3d-ce11-478d-b841-587e8d50dcd9\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.137440 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-operator-scripts\") pod \"e5582b3d-ce11-478d-b841-587e8d50dcd9\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.137605 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-kolla-config\") pod \"e5582b3d-ce11-478d-b841-587e8d50dcd9\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.137695 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-generated\") pod \"e5582b3d-ce11-478d-b841-587e8d50dcd9\" (UID: \"e5582b3d-ce11-478d-b841-587e8d50dcd9\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.138421 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.139762 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "e5582b3d-ce11-478d-b841-587e8d50dcd9" (UID: "e5582b3d-ce11-478d-b841-587e8d50dcd9"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.140621 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "e5582b3d-ce11-478d-b841-587e8d50dcd9" (UID: "e5582b3d-ce11-478d-b841-587e8d50dcd9"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.142073 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "e5582b3d-ce11-478d-b841-587e8d50dcd9" (UID: "e5582b3d-ce11-478d-b841-587e8d50dcd9"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.146383 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/e5582b3d-ce11-478d-b841-587e8d50dcd9-kube-api-access-p2nzd" (OuterVolumeSpecName: "kube-api-access-p2nzd") pod "e5582b3d-ce11-478d-b841-587e8d50dcd9" (UID: "e5582b3d-ce11-478d-b841-587e8d50dcd9"). InnerVolumeSpecName "kube-api-access-p2nzd". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.149453 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage01-crc" (OuterVolumeSpecName: "mysql-db") pod "e5582b3d-ce11-478d-b841-587e8d50dcd9" (UID: "e5582b3d-ce11-478d-b841-587e8d50dcd9"). InnerVolumeSpecName "local-storage01-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.240572 4703 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.240617 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-p2nzd\" (UniqueName: \"kubernetes.io/projected/e5582b3d-ce11-478d-b841-587e8d50dcd9-kube-api-access-p2nzd\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.240628 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.240669 4703 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/e5582b3d-ce11-478d-b841-587e8d50dcd9-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.240680 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/e5582b3d-ce11-478d-b841-587e8d50dcd9-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.254731 4703 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage01-crc" (UniqueName: "kubernetes.io/local-volume/local-storage01-crc") on node "crc" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.342294 4703 reconciler_common.go:293] "Volume detached for volume \"local-storage01-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage01-crc\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.456511 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.544311 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-default\") pod \"fcf04156-6efa-4399-832a-aabe98bde6e3\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.544425 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-operator-scripts\") pod \"fcf04156-6efa-4399-832a-aabe98bde6e3\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.544525 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-vgd84\" (UniqueName: \"kubernetes.io/projected/fcf04156-6efa-4399-832a-aabe98bde6e3-kube-api-access-vgd84\") pod \"fcf04156-6efa-4399-832a-aabe98bde6e3\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.544554 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-kolla-config\") pod \"fcf04156-6efa-4399-832a-aabe98bde6e3\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.544588 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-generated\") pod \"fcf04156-6efa-4399-832a-aabe98bde6e3\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.544619 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"mysql-db\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") pod \"fcf04156-6efa-4399-832a-aabe98bde6e3\" (UID: \"fcf04156-6efa-4399-832a-aabe98bde6e3\") " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545319 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-kolla-config" (OuterVolumeSpecName: "kolla-config") pod "fcf04156-6efa-4399-832a-aabe98bde6e3" (UID: "fcf04156-6efa-4399-832a-aabe98bde6e3"). InnerVolumeSpecName "kolla-config". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545521 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-generated" (OuterVolumeSpecName: "config-data-generated") pod "fcf04156-6efa-4399-832a-aabe98bde6e3" (UID: "fcf04156-6efa-4399-832a-aabe98bde6e3"). InnerVolumeSpecName "config-data-generated". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545592 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-default" (OuterVolumeSpecName: "config-data-default") pod "fcf04156-6efa-4399-832a-aabe98bde6e3" (UID: "fcf04156-6efa-4399-832a-aabe98bde6e3"). InnerVolumeSpecName "config-data-default". 
PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545624 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-operator-scripts" (OuterVolumeSpecName: "operator-scripts") pod "fcf04156-6efa-4399-832a-aabe98bde6e3" (UID: "fcf04156-6efa-4399-832a-aabe98bde6e3"). InnerVolumeSpecName "operator-scripts". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545823 4703 reconciler_common.go:293] "Volume detached for volume \"kolla-config\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-kolla-config\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545850 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-generated\" (UniqueName: \"kubernetes.io/empty-dir/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-generated\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545866 4703 reconciler_common.go:293] "Volume detached for volume \"config-data-default\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-config-data-default\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.545878 4703 reconciler_common.go:293] "Volume detached for volume \"operator-scripts\" (UniqueName: \"kubernetes.io/configmap/fcf04156-6efa-4399-832a-aabe98bde6e3-operator-scripts\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.548078 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/fcf04156-6efa-4399-832a-aabe98bde6e3-kube-api-access-vgd84" (OuterVolumeSpecName: "kube-api-access-vgd84") pod "fcf04156-6efa-4399-832a-aabe98bde6e3" (UID: "fcf04156-6efa-4399-832a-aabe98bde6e3"). InnerVolumeSpecName "kube-api-access-vgd84". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.553372 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/local-volume/local-storage11-crc" (OuterVolumeSpecName: "mysql-db") pod "fcf04156-6efa-4399-832a-aabe98bde6e3" (UID: "fcf04156-6efa-4399-832a-aabe98bde6e3"). InnerVolumeSpecName "local-storage11-crc". PluginName "kubernetes.io/local-volume", VolumeGidValue "" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.607010 4703 generic.go:334] "Generic (PLEG): container finished" podID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerID="01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed" exitCode=0 Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.607086 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"e5582b3d-ce11-478d-b841-587e8d50dcd9","Type":"ContainerDied","Data":"01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed"} Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.607089 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/openstack-galera-1" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.607126 4703 scope.go:117] "RemoveContainer" containerID="01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.607114 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-1" event={"ID":"e5582b3d-ce11-478d-b841-587e8d50dcd9","Type":"ContainerDied","Data":"80a0bf2ed11ac8b1b7ecf06257a4225dab3b4136277c403de5e9eb87001317ef"} Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.608817 4703 generic.go:334] "Generic (PLEG): container finished" podID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" exitCode=0 Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.608879 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"fcf04156-6efa-4399-832a-aabe98bde6e3","Type":"ContainerDied","Data":"b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9"} Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.608917 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/openstack-galera-0" event={"ID":"fcf04156-6efa-4399-832a-aabe98bde6e3","Type":"ContainerDied","Data":"773d64b87ef2d67e6422701aba6a0343d8108bc70d19a4fa7b6f2fbbeff93d5d"} Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.608898 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/openstack-galera-0" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.635466 4703 scope.go:117] "RemoveContainer" containerID="68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.647972 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.662901 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/openstack-galera-1"] Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.667412 4703 scope.go:117] "RemoveContainer" containerID="01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.663073 4703 reconciler_common.go:286] "operationExecutor.UnmountDevice started for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" " Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.667610 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-vgd84\" (UniqueName: \"kubernetes.io/projected/fcf04156-6efa-4399-832a-aabe98bde6e3-kube-api-access-vgd84\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.668187 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed\": container with ID starting with 01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed not found: ID does not exist" containerID="01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.668217 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed"} err="failed 
to get container status \"01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed\": rpc error: code = NotFound desc = could not find container \"01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed\": container with ID starting with 01973f6b28035851c22f2fa155174b442b4297b6ebe5298bda9ca158b2cc31ed not found: ID does not exist" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.668240 4703 scope.go:117] "RemoveContainer" containerID="68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e" Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.669509 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e\": container with ID starting with 68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e not found: ID does not exist" containerID="68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.669539 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e"} err="failed to get container status \"68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e\": rpc error: code = NotFound desc = could not find container \"68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e\": container with ID starting with 68bf4fc272b671f6cbc55eeb08e51b3126bfb64f582ad58b1f13dedf73acdb3e not found: ID does not exist" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.669554 4703 scope.go:117] "RemoveContainer" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.674702 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.674743 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/openstack-galera-0"] Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.676183 4703 operation_generator.go:917] UnmountDevice succeeded for volume "local-storage11-crc" (UniqueName: "kubernetes.io/local-volume/local-storage11-crc") on node "crc" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.686740 4703 scope.go:117] "RemoveContainer" containerID="77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.702405 4703 scope.go:117] "RemoveContainer" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.703249 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9\": container with ID starting with b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9 not found: ID does not exist" containerID="b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.703294 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9"} err="failed to get container status \"b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9\": rpc error: code = NotFound desc = could not find container 
\"b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9\": container with ID starting with b874f27a67834c88f4a3b8c498b03320c2e8801d5887c641d122ab1b90ac74e9 not found: ID does not exist" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.703316 4703 scope.go:117] "RemoveContainer" containerID="77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5" Feb 02 13:14:48 crc kubenswrapper[4703]: E0202 13:14:48.703582 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5\": container with ID starting with 77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5 not found: ID does not exist" containerID="77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.703602 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5"} err="failed to get container status \"77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5\": rpc error: code = NotFound desc = could not find container \"77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5\": container with ID starting with 77649020ecc3e1c06e2e223d3cc71b004722b4bb5e4ada17b382181283d7a8e5 not found: ID does not exist" Feb 02 13:14:48 crc kubenswrapper[4703]: I0202 13:14:48.770937 4703 reconciler_common.go:293] "Volume detached for volume \"local-storage11-crc\" (UniqueName: \"kubernetes.io/local-volume/local-storage11-crc\") on node \"crc\" DevicePath \"\"" Feb 02 13:14:49 crc kubenswrapper[4703]: I0202 13:14:49.147676 4703 prober.go:107] "Probe failed" probeType="Readiness" pod="manila-kuttl-tests/memcached-0" podUID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" containerName="memcached" probeResult="failure" output="dial tcp 10.217.0.54:11211: i/o timeout" Feb 02 13:14:49 crc kubenswrapper[4703]: I0202 13:14:49.943208 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" path="/var/lib/kubelet/pods/e5582b3d-ce11-478d-b841-587e8d50dcd9/volumes" Feb 02 13:14:49 crc kubenswrapper[4703]: I0202 13:14:49.943845 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" path="/var/lib/kubelet/pods/fcf04156-6efa-4399-832a-aabe98bde6e3/volumes" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.137251 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6"] Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138082 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="mysql-bootstrap" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138100 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="mysql-bootstrap" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138110 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerName="mariadb-account-delete" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138119 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerName="mariadb-account-delete" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138130 4703 
cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138139 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138153 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" containerName="memcached" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138160 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" containerName="memcached" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138171 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138178 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138195 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138202 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138211 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerName="mysql-bootstrap" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138218 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerName="mysql-bootstrap" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138232 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerName="rabbitmq" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138239 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerName="rabbitmq" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138251 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerName="setup-container" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138257 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerName="setup-container" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138289 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="mysql-bootstrap" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138297 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="mysql-bootstrap" Feb 02 13:15:00 crc kubenswrapper[4703]: E0202 13:15:00.138311 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="199d616b-b578-48ae-aad7-e4e2ed6f3d31" containerName="keystone-api" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138318 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="199d616b-b578-48ae-aad7-e4e2ed6f3d31" containerName="keystone-api" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138444 4703 memory_manager.go:354] "RemoveStaleState removing state" 
podUID="85e64c96-dfa6-4c6b-8a8c-bc3182e3a66e" containerName="memcached" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138454 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerName="mariadb-account-delete" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138465 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="fcf04156-6efa-4399-832a-aabe98bde6e3" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138481 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="e5582b3d-ce11-478d-b841-587e8d50dcd9" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138489 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerName="mariadb-account-delete" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138499 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="1cd5038e-d740-437c-8451-5e31dd2b2d10" containerName="rabbitmq" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138509 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="e923beff-a23d-4f99-a05f-f48d59515e7e" containerName="galera" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.138518 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="199d616b-b578-48ae-aad7-e4e2ed6f3d31" containerName="keystone-api" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.139011 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.144219 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-operator-lifecycle-manager"/"collect-profiles-config" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.144343 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-operator-lifecycle-manager"/"collect-profiles-dockercfg-kzf4t" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.156415 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6"] Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.221843 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-secret-volume\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.222026 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-x2rxb\" (UniqueName: \"kubernetes.io/projected/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-kube-api-access-x2rxb\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.222087 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-config-volume\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " 
pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.322974 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-x2rxb\" (UniqueName: \"kubernetes.io/projected/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-kube-api-access-x2rxb\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.323071 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-config-volume\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.323151 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-secret-volume\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.324374 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-config-volume\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.329502 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-secret-volume\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.342785 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-x2rxb\" (UniqueName: \"kubernetes.io/projected/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-kube-api-access-x2rxb\") pod \"collect-profiles-29500635-rgdp6\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.467596 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:00 crc kubenswrapper[4703]: I0202 13:15:00.917690 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6"] Feb 02 13:15:01 crc kubenswrapper[4703]: I0202 13:15:01.698468 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" event={"ID":"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb","Type":"ContainerStarted","Data":"e05ecca069b98da422c90a7e743ee70552f59de3c2ab32eaa0be57427fc8bedd"} Feb 02 13:15:01 crc kubenswrapper[4703]: I0202 13:15:01.698520 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" event={"ID":"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb","Type":"ContainerStarted","Data":"23938faad5ef1a8b03e09613755a44ae08cc6b4ed2202bbb17991d3ee636063b"} Feb 02 13:15:01 crc kubenswrapper[4703]: I0202 13:15:01.719803 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" podStartSLOduration=1.719788313 podStartE2EDuration="1.719788313s" podCreationTimestamp="2026-02-02 13:15:00 +0000 UTC" firstStartedPulling="0001-01-01 00:00:00 +0000 UTC" lastFinishedPulling="0001-01-01 00:00:00 +0000 UTC" observedRunningTime="2026-02-02 13:15:01.713051232 +0000 UTC m=+1428.728258766" watchObservedRunningTime="2026-02-02 13:15:01.719788313 +0000 UTC m=+1428.734995847" Feb 02 13:15:02 crc kubenswrapper[4703]: I0202 13:15:02.707208 4703 generic.go:334] "Generic (PLEG): container finished" podID="eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" containerID="e05ecca069b98da422c90a7e743ee70552f59de3c2ab32eaa0be57427fc8bedd" exitCode=0 Feb 02 13:15:02 crc kubenswrapper[4703]: I0202 13:15:02.707320 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" event={"ID":"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb","Type":"ContainerDied","Data":"e05ecca069b98da422c90a7e743ee70552f59de3c2ab32eaa0be57427fc8bedd"} Feb 02 13:15:03 crc kubenswrapper[4703]: I0202 13:15:03.979120 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.074304 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-x2rxb\" (UniqueName: \"kubernetes.io/projected/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-kube-api-access-x2rxb\") pod \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.074440 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-config-volume\") pod \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.074495 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-secret-volume\") pod \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\" (UID: \"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb\") " Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.076196 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/configmap/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-config-volume" (OuterVolumeSpecName: "config-volume") pod "eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" (UID: "eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb"). InnerVolumeSpecName "config-volume". PluginName "kubernetes.io/configmap", VolumeGidValue "" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.080352 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-kube-api-access-x2rxb" (OuterVolumeSpecName: "kube-api-access-x2rxb") pod "eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" (UID: "eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb"). InnerVolumeSpecName "kube-api-access-x2rxb". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.085350 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-secret-volume" (OuterVolumeSpecName: "secret-volume") pod "eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" (UID: "eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb"). InnerVolumeSpecName "secret-volume". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.175910 4703 reconciler_common.go:293] "Volume detached for volume \"config-volume\" (UniqueName: \"kubernetes.io/configmap/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-config-volume\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.175946 4703 reconciler_common.go:293] "Volume detached for volume \"secret-volume\" (UniqueName: \"kubernetes.io/secret/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-secret-volume\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.175956 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-x2rxb\" (UniqueName: \"kubernetes.io/projected/eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb-kube-api-access-x2rxb\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.722715 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" event={"ID":"eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb","Type":"ContainerDied","Data":"23938faad5ef1a8b03e09613755a44ae08cc6b4ed2202bbb17991d3ee636063b"} Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.722751 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="23938faad5ef1a8b03e09613755a44ae08cc6b4ed2202bbb17991d3ee636063b" Feb 02 13:15:04 crc kubenswrapper[4703]: I0202 13:15:04.722810 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-operator-lifecycle-manager/collect-profiles-29500635-rgdp6" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.730557 4703 generic.go:334] "Generic (PLEG): container finished" podID="6a995ed1-b4b5-43d3-91cf-2f461916e342" containerID="577887e8178672ae76d1175143f549e287d8a12ad99c51f5ad101e0260d6f4dd" exitCode=137 Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.730594 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" event={"ID":"6a995ed1-b4b5-43d3-91cf-2f461916e342","Type":"ContainerDied","Data":"577887e8178672ae76d1175143f549e287d8a12ad99c51f5ad101e0260d6f4dd"} Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.730621 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" event={"ID":"6a995ed1-b4b5-43d3-91cf-2f461916e342","Type":"ContainerDied","Data":"582f16779ad184d11afece6c8063b66fe75957aa9f369b5e6f641c3f87009c89"} Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.730633 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="582f16779ad184d11afece6c8063b66fe75957aa9f369b5e6f641c3f87009c89" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.767405 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.799979 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-config-data\") pod \"6a995ed1-b4b5-43d3-91cf-2f461916e342\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.800033 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-rd96m\" (UniqueName: \"kubernetes.io/projected/6a995ed1-b4b5-43d3-91cf-2f461916e342-kube-api-access-rd96m\") pod \"6a995ed1-b4b5-43d3-91cf-2f461916e342\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.800050 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-job-config-data\") pod \"6a995ed1-b4b5-43d3-91cf-2f461916e342\" (UID: \"6a995ed1-b4b5-43d3-91cf-2f461916e342\") " Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.805323 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-job-config-data" (OuterVolumeSpecName: "job-config-data") pod "6a995ed1-b4b5-43d3-91cf-2f461916e342" (UID: "6a995ed1-b4b5-43d3-91cf-2f461916e342"). InnerVolumeSpecName "job-config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.805351 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6a995ed1-b4b5-43d3-91cf-2f461916e342-kube-api-access-rd96m" (OuterVolumeSpecName: "kube-api-access-rd96m") pod "6a995ed1-b4b5-43d3-91cf-2f461916e342" (UID: "6a995ed1-b4b5-43d3-91cf-2f461916e342"). InnerVolumeSpecName "kube-api-access-rd96m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.808722 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-config-data" (OuterVolumeSpecName: "config-data") pod "6a995ed1-b4b5-43d3-91cf-2f461916e342" (UID: "6a995ed1-b4b5-43d3-91cf-2f461916e342"). InnerVolumeSpecName "config-data". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.901400 4703 reconciler_common.go:293] "Volume detached for volume \"config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.901725 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-rd96m\" (UniqueName: \"kubernetes.io/projected/6a995ed1-b4b5-43d3-91cf-2f461916e342-kube-api-access-rd96m\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:05 crc kubenswrapper[4703]: I0202 13:15:05.901740 4703 reconciler_common.go:293] "Volume detached for volume \"job-config-data\" (UniqueName: \"kubernetes.io/secret/6a995ed1-b4b5-43d3-91cf-2f461916e342-job-config-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:06 crc kubenswrapper[4703]: I0202 13:15:06.788437 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr" Feb 02 13:15:06 crc kubenswrapper[4703]: I0202 13:15:06.808281 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"] Feb 02 13:15:06 crc kubenswrapper[4703]: I0202 13:15:06.813442 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/manila-service-cleanup-n5b5h655-4tsfr"] Feb 02 13:15:07 crc kubenswrapper[4703]: I0202 13:15:07.945256 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6a995ed1-b4b5-43d3-91cf-2f461916e342" path="/var/lib/kubelet/pods/6a995ed1-b4b5-43d3-91cf-2f461916e342/volumes" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.432831 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="manila-kuttl-tests/ceph" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.527664 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-46ngm\" (UniqueName: \"kubernetes.io/projected/4460b73f-1e46-424f-896a-64e152c5976c-kube-api-access-46ngm\") pod \"4460b73f-1e46-424f-896a-64e152c5976c\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.527753 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-log\") pod \"4460b73f-1e46-424f-896a-64e152c5976c\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.527783 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-data\") pod \"4460b73f-1e46-424f-896a-64e152c5976c\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.527808 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-run\") pod \"4460b73f-1e46-424f-896a-64e152c5976c\" (UID: \"4460b73f-1e46-424f-896a-64e152c5976c\") " Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.528632 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-log" (OuterVolumeSpecName: "log") pod "4460b73f-1e46-424f-896a-64e152c5976c" (UID: "4460b73f-1e46-424f-896a-64e152c5976c"). InnerVolumeSpecName "log". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.529146 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-run" (OuterVolumeSpecName: "run") pod "4460b73f-1e46-424f-896a-64e152c5976c" (UID: "4460b73f-1e46-424f-896a-64e152c5976c"). InnerVolumeSpecName "run". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.535095 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-data" (OuterVolumeSpecName: "data") pod "4460b73f-1e46-424f-896a-64e152c5976c" (UID: "4460b73f-1e46-424f-896a-64e152c5976c"). InnerVolumeSpecName "data". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.535444 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4460b73f-1e46-424f-896a-64e152c5976c-kube-api-access-46ngm" (OuterVolumeSpecName: "kube-api-access-46ngm") pod "4460b73f-1e46-424f-896a-64e152c5976c" (UID: "4460b73f-1e46-424f-896a-64e152c5976c"). InnerVolumeSpecName "kube-api-access-46ngm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.628955 4703 reconciler_common.go:293] "Volume detached for volume \"log\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-log\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.629072 4703 reconciler_common.go:293] "Volume detached for volume \"data\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-data\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.629090 4703 reconciler_common.go:293] "Volume detached for volume \"run\" (UniqueName: \"kubernetes.io/empty-dir/4460b73f-1e46-424f-896a-64e152c5976c-run\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.629103 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-46ngm\" (UniqueName: \"kubernetes.io/projected/4460b73f-1e46-424f-896a-64e152c5976c-kube-api-access-46ngm\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.843334 4703 generic.go:334] "Generic (PLEG): container finished" podID="4460b73f-1e46-424f-896a-64e152c5976c" containerID="01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f" exitCode=137 Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.843375 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"4460b73f-1e46-424f-896a-64e152c5976c","Type":"ContainerDied","Data":"01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f"} Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.843395 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="manila-kuttl-tests/ceph" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.843418 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="manila-kuttl-tests/ceph" event={"ID":"4460b73f-1e46-424f-896a-64e152c5976c","Type":"ContainerDied","Data":"59ffecb5ff42fad05e7c6386311f97cccabb0213aaf62872a30f9917c51845fb"} Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.843434 4703 scope.go:117] "RemoveContainer" containerID="01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.866953 4703 scope.go:117] "RemoveContainer" containerID="01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f" Feb 02 13:15:14 crc kubenswrapper[4703]: E0202 13:15:14.869606 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f\": container with ID starting with 01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f not found: ID does not exist" containerID="01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.869653 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f"} err="failed to get container status \"01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f\": rpc error: code = NotFound desc = could not find container \"01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f\": container with ID starting with 01ea6eac517b7bae5ecdf5607f0faf2d5373c57792ba25e561cf85274221908f not found: ID does not exist" Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.873182 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["manila-kuttl-tests/ceph"] Feb 02 13:15:14 crc kubenswrapper[4703]: I0202 13:15:14.877752 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["manila-kuttl-tests/ceph"] Feb 02 13:15:15 crc kubenswrapper[4703]: I0202 13:15:15.952592 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4460b73f-1e46-424f-896a-64e152c5976c" path="/var/lib/kubelet/pods/4460b73f-1e46-424f-896a-64e152c5976c/volumes" Feb 02 13:15:15 crc kubenswrapper[4703]: I0202 13:15:15.984987 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:15:15 crc kubenswrapper[4703]: I0202 13:15:15.985043 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.104207 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6"] Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.104787 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" podUID="86515156-fe01-412d-a10d-16ba26cfb8f8" containerName="manager" 
containerID="cri-o://72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158" gracePeriod=10 Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.347315 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-7v4d4"] Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.347759 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/keystone-operator-index-7v4d4" podUID="3b5301e6-77c6-4fd1-a97c-8ec7be30d794" containerName="registry-server" containerID="cri-o://d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a" gracePeriod=30 Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.384595 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx"] Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.391602 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/b43f19b8e3bb8997a527070b172ae030accff9cd1a2f2b076f58d9c4efx5jtx"] Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.511981 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.557932 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-dwc7m\" (UniqueName: \"kubernetes.io/projected/86515156-fe01-412d-a10d-16ba26cfb8f8-kube-api-access-dwc7m\") pod \"86515156-fe01-412d-a10d-16ba26cfb8f8\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.558142 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-webhook-cert\") pod \"86515156-fe01-412d-a10d-16ba26cfb8f8\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.558195 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-apiservice-cert\") pod \"86515156-fe01-412d-a10d-16ba26cfb8f8\" (UID: \"86515156-fe01-412d-a10d-16ba26cfb8f8\") " Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.564435 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/86515156-fe01-412d-a10d-16ba26cfb8f8-kube-api-access-dwc7m" (OuterVolumeSpecName: "kube-api-access-dwc7m") pod "86515156-fe01-412d-a10d-16ba26cfb8f8" (UID: "86515156-fe01-412d-a10d-16ba26cfb8f8"). InnerVolumeSpecName "kube-api-access-dwc7m". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.565510 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "86515156-fe01-412d-a10d-16ba26cfb8f8" (UID: "86515156-fe01-412d-a10d-16ba26cfb8f8"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.577669 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "86515156-fe01-412d-a10d-16ba26cfb8f8" (UID: "86515156-fe01-412d-a10d-16ba26cfb8f8"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.659820 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-dwc7m\" (UniqueName: \"kubernetes.io/projected/86515156-fe01-412d-a10d-16ba26cfb8f8-kube-api-access-dwc7m\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.660084 4703 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.660094 4703 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/86515156-fe01-412d-a10d-16ba26cfb8f8-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.698847 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.760673 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4tvm4\" (UniqueName: \"kubernetes.io/projected/3b5301e6-77c6-4fd1-a97c-8ec7be30d794-kube-api-access-4tvm4\") pod \"3b5301e6-77c6-4fd1-a97c-8ec7be30d794\" (UID: \"3b5301e6-77c6-4fd1-a97c-8ec7be30d794\") " Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.763756 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3b5301e6-77c6-4fd1-a97c-8ec7be30d794-kube-api-access-4tvm4" (OuterVolumeSpecName: "kube-api-access-4tvm4") pod "3b5301e6-77c6-4fd1-a97c-8ec7be30d794" (UID: "3b5301e6-77c6-4fd1-a97c-8ec7be30d794"). InnerVolumeSpecName "kube-api-access-4tvm4". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.858045 4703 generic.go:334] "Generic (PLEG): container finished" podID="3b5301e6-77c6-4fd1-a97c-8ec7be30d794" containerID="d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a" exitCode=0 Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.858087 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-index-7v4d4" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.858108 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-7v4d4" event={"ID":"3b5301e6-77c6-4fd1-a97c-8ec7be30d794","Type":"ContainerDied","Data":"d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a"} Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.858143 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-index-7v4d4" event={"ID":"3b5301e6-77c6-4fd1-a97c-8ec7be30d794","Type":"ContainerDied","Data":"c3e0a0fdeb8249bb57ebb28973662325900338ff3c283ad346d632d69c17c9ef"} Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.858189 4703 scope.go:117] "RemoveContainer" containerID="d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.860534 4703 generic.go:334] "Generic (PLEG): container finished" podID="86515156-fe01-412d-a10d-16ba26cfb8f8" containerID="72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158" exitCode=0 Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.860576 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" event={"ID":"86515156-fe01-412d-a10d-16ba26cfb8f8","Type":"ContainerDied","Data":"72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158"} Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.860630 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" event={"ID":"86515156-fe01-412d-a10d-16ba26cfb8f8","Type":"ContainerDied","Data":"82730dc3c4b95ccf9bc3cc67a59af6d50d32defeddaa016255df49401cb9e69d"} Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.860643 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.861896 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4tvm4\" (UniqueName: \"kubernetes.io/projected/3b5301e6-77c6-4fd1-a97c-8ec7be30d794-kube-api-access-4tvm4\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.892501 4703 scope.go:117] "RemoveContainer" containerID="d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.894432 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-index-7v4d4"] Feb 02 13:15:16 crc kubenswrapper[4703]: E0202 13:15:16.896508 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a\": container with ID starting with d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a not found: ID does not exist" containerID="d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.896550 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a"} err="failed to get container status \"d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a\": rpc error: code = NotFound desc = could not find container \"d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a\": container with ID starting with d92d07d7f0c10be1b265152d0e398dd1327a770126d1c3f684084b1a53f0710a not found: ID does not exist" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.896913 4703 scope.go:117] "RemoveContainer" containerID="72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.905001 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-index-7v4d4"] Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.917702 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6"] Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.919181 4703 scope.go:117] "RemoveContainer" containerID="72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158" Feb 02 13:15:16 crc kubenswrapper[4703]: E0202 13:15:16.919588 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158\": container with ID starting with 72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158 not found: ID does not exist" containerID="72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.919626 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158"} err="failed to get container status \"72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158\": rpc error: code = NotFound desc = could not find container \"72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158\": container with ID starting with 
72878d1bfcc52bfac091f8eb32cf87bf7d27dfc605e8013d027a831a4f09a158 not found: ID does not exist" Feb 02 13:15:16 crc kubenswrapper[4703]: I0202 13:15:16.921995 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/keystone-operator-controller-manager-64d7687b4f-627k6"] Feb 02 13:15:17 crc kubenswrapper[4703]: I0202 13:15:17.942041 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3b5301e6-77c6-4fd1-a97c-8ec7be30d794" path="/var/lib/kubelet/pods/3b5301e6-77c6-4fd1-a97c-8ec7be30d794/volumes" Feb 02 13:15:17 crc kubenswrapper[4703]: I0202 13:15:17.942601 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="86515156-fe01-412d-a10d-16ba26cfb8f8" path="/var/lib/kubelet/pods/86515156-fe01-412d-a10d-16ba26cfb8f8/volumes" Feb 02 13:15:17 crc kubenswrapper[4703]: I0202 13:15:17.943125 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="949ed635-1fc0-4037-8d5a-08817761db87" path="/var/lib/kubelet/pods/949ed635-1fc0-4037-8d5a-08817761db87/volumes" Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.516314 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r"] Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.516555 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" podUID="f3e89261-c5b4-429d-b84e-4a89fa66b98b" containerName="operator" containerID="cri-o://9a453d5555c17faa4dda9b09d374ce9c18449d8a777be0db00b321b90d77356e" gracePeriod=10 Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.779500 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-f7km8"] Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.779941 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" podUID="01565f9f-8074-451e-9ebd-1b94124e364d" containerName="registry-server" containerID="cri-o://e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478" gracePeriod=30 Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.820259 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf"] Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.847167 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/9704761d240e56fb98655ffd81084895b33a73ec711f4dcdef0450e590nvfvf"] Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.883451 4703 generic.go:334] "Generic (PLEG): container finished" podID="f3e89261-c5b4-429d-b84e-4a89fa66b98b" containerID="9a453d5555c17faa4dda9b09d374ce9c18449d8a777be0db00b321b90d77356e" exitCode=0 Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.883568 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" event={"ID":"f3e89261-c5b4-429d-b84e-4a89fa66b98b","Type":"ContainerDied","Data":"9a453d5555c17faa4dda9b09d374ce9c18449d8a777be0db00b321b90d77356e"} Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.886693 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" event={"ID":"f3e89261-c5b4-429d-b84e-4a89fa66b98b","Type":"ContainerDied","Data":"7d552f058d366cbbe5f08c4e8a59624f0b839ebee56580079e1338aefa9872a7"} Feb 02 13:15:18 crc 
kubenswrapper[4703]: I0202 13:15:18.886850 4703 pod_container_deletor.go:80] "Container not found in pod's containers" containerID="7d552f058d366cbbe5f08c4e8a59624f0b839ebee56580079e1338aefa9872a7" Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.927507 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.989480 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-fbknm\" (UniqueName: \"kubernetes.io/projected/f3e89261-c5b4-429d-b84e-4a89fa66b98b-kube-api-access-fbknm\") pod \"f3e89261-c5b4-429d-b84e-4a89fa66b98b\" (UID: \"f3e89261-c5b4-429d-b84e-4a89fa66b98b\") " Feb 02 13:15:18 crc kubenswrapper[4703]: I0202 13:15:18.995045 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/f3e89261-c5b4-429d-b84e-4a89fa66b98b-kube-api-access-fbknm" (OuterVolumeSpecName: "kube-api-access-fbknm") pod "f3e89261-c5b4-429d-b84e-4a89fa66b98b" (UID: "f3e89261-c5b4-429d-b84e-4a89fa66b98b"). InnerVolumeSpecName "kube-api-access-fbknm". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.092333 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-fbknm\" (UniqueName: \"kubernetes.io/projected/f3e89261-c5b4-429d-b84e-4a89fa66b98b-kube-api-access-fbknm\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.161255 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.193478 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-nwgdq\" (UniqueName: \"kubernetes.io/projected/01565f9f-8074-451e-9ebd-1b94124e364d-kube-api-access-nwgdq\") pod \"01565f9f-8074-451e-9ebd-1b94124e364d\" (UID: \"01565f9f-8074-451e-9ebd-1b94124e364d\") " Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.196552 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/01565f9f-8074-451e-9ebd-1b94124e364d-kube-api-access-nwgdq" (OuterVolumeSpecName: "kube-api-access-nwgdq") pod "01565f9f-8074-451e-9ebd-1b94124e364d" (UID: "01565f9f-8074-451e-9ebd-1b94124e364d"). InnerVolumeSpecName "kube-api-access-nwgdq". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.294666 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-nwgdq\" (UniqueName: \"kubernetes.io/projected/01565f9f-8074-451e-9ebd-1b94124e364d-kube-api-access-nwgdq\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.892541 4703 generic.go:334] "Generic (PLEG): container finished" podID="01565f9f-8074-451e-9ebd-1b94124e364d" containerID="e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478" exitCode=0 Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.892584 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" event={"ID":"01565f9f-8074-451e-9ebd-1b94124e364d","Type":"ContainerDied","Data":"e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478"} Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.892627 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.892637 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" event={"ID":"01565f9f-8074-451e-9ebd-1b94124e364d","Type":"ContainerDied","Data":"8533ecb6e1796d7efbfd15b012b447617b3ac84ff47423ad9be32c40879aa379"} Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.892659 4703 scope.go:117] "RemoveContainer" containerID="e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.893202 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/rabbitmq-cluster-operator-index-f7km8" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.919109 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r"] Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.924369 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-779fc9694b-hlt8r"] Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.928053 4703 scope.go:117] "RemoveContainer" containerID="e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478" Feb 02 13:15:19 crc kubenswrapper[4703]: E0202 13:15:19.928723 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478\": container with ID starting with e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478 not found: ID does not exist" containerID="e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.928766 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478"} err="failed to get container status \"e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478\": rpc error: code = NotFound desc = could not find container \"e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478\": container with ID starting with e40e8b01fcbec891287f141de1eabb1229f331c4a9b4a1edde54442ecabda478 not found: ID does not exist" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.940845 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="277808c1-860f-4b6e-99a2-ad8d6031d334" path="/var/lib/kubelet/pods/277808c1-860f-4b6e-99a2-ad8d6031d334/volumes" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.941610 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="f3e89261-c5b4-429d-b84e-4a89fa66b98b" path="/var/lib/kubelet/pods/f3e89261-c5b4-429d-b84e-4a89fa66b98b/volumes" Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.942079 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-f7km8"] Feb 02 13:15:19 crc kubenswrapper[4703]: I0202 13:15:19.942113 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/rabbitmq-cluster-operator-index-f7km8"] Feb 02 13:15:21 crc kubenswrapper[4703]: I0202 13:15:21.941620 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="01565f9f-8074-451e-9ebd-1b94124e364d" path="/var/lib/kubelet/pods/01565f9f-8074-451e-9ebd-1b94124e364d/volumes" Feb 02 
13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.081458 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.082029 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" podUID="1a37f2c9-188b-4c10-ac43-035262781444" containerName="manager" containerID="cri-o://051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b" gracePeriod=10 Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.333986 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-8d6lm"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.334255 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/infra-operator-index-8d6lm" podUID="a5197757-55f2-4c6b-96a4-f3c838e0ea9f" containerName="registry-server" containerID="cri-o://7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2" gracePeriod=30 Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.359124 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.366567 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/d7c3b59ed6c2e571e21460d743e5fcd0c5f76cb7c446e474a3d05f75766s4cp"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.615202 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.693732 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.755895 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5kkjp\" (UniqueName: \"kubernetes.io/projected/1a37f2c9-188b-4c10-ac43-035262781444-kube-api-access-5kkjp\") pod \"1a37f2c9-188b-4c10-ac43-035262781444\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.756012 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-webhook-cert\") pod \"1a37f2c9-188b-4c10-ac43-035262781444\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.756142 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-apiservice-cert\") pod \"1a37f2c9-188b-4c10-ac43-035262781444\" (UID: \"1a37f2c9-188b-4c10-ac43-035262781444\") " Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.762826 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "1a37f2c9-188b-4c10-ac43-035262781444" (UID: "1a37f2c9-188b-4c10-ac43-035262781444"). InnerVolumeSpecName "webhook-cert". 
PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.762832 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/1a37f2c9-188b-4c10-ac43-035262781444-kube-api-access-5kkjp" (OuterVolumeSpecName: "kube-api-access-5kkjp") pod "1a37f2c9-188b-4c10-ac43-035262781444" (UID: "1a37f2c9-188b-4c10-ac43-035262781444"). InnerVolumeSpecName "kube-api-access-5kkjp". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.763928 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "1a37f2c9-188b-4c10-ac43-035262781444" (UID: "1a37f2c9-188b-4c10-ac43-035262781444"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.858791 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-xtq5d\" (UniqueName: \"kubernetes.io/projected/a5197757-55f2-4c6b-96a4-f3c838e0ea9f-kube-api-access-xtq5d\") pod \"a5197757-55f2-4c6b-96a4-f3c838e0ea9f\" (UID: \"a5197757-55f2-4c6b-96a4-f3c838e0ea9f\") " Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.859078 4703 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.859094 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5kkjp\" (UniqueName: \"kubernetes.io/projected/1a37f2c9-188b-4c10-ac43-035262781444-kube-api-access-5kkjp\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.859104 4703 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/1a37f2c9-188b-4c10-ac43-035262781444-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.861717 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a5197757-55f2-4c6b-96a4-f3c838e0ea9f-kube-api-access-xtq5d" (OuterVolumeSpecName: "kube-api-access-xtq5d") pod "a5197757-55f2-4c6b-96a4-f3c838e0ea9f" (UID: "a5197757-55f2-4c6b-96a4-f3c838e0ea9f"). InnerVolumeSpecName "kube-api-access-xtq5d". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.925130 4703 generic.go:334] "Generic (PLEG): container finished" podID="a5197757-55f2-4c6b-96a4-f3c838e0ea9f" containerID="7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2" exitCode=0 Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.925182 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-8d6lm" event={"ID":"a5197757-55f2-4c6b-96a4-f3c838e0ea9f","Type":"ContainerDied","Data":"7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2"} Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.925223 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openstack-operators/infra-operator-index-8d6lm" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.925227 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-index-8d6lm" event={"ID":"a5197757-55f2-4c6b-96a4-f3c838e0ea9f","Type":"ContainerDied","Data":"467a46d466cb73c3726306dab1cc8c5835637ff2ac9bfea0cb2b26a110418005"} Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.925240 4703 scope.go:117] "RemoveContainer" containerID="7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.926561 4703 generic.go:334] "Generic (PLEG): container finished" podID="1a37f2c9-188b-4c10-ac43-035262781444" containerID="051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b" exitCode=0 Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.926607 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.926615 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" event={"ID":"1a37f2c9-188b-4c10-ac43-035262781444","Type":"ContainerDied","Data":"051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b"} Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.926652 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm" event={"ID":"1a37f2c9-188b-4c10-ac43-035262781444","Type":"ContainerDied","Data":"463ab6c74c08eed08129bcb3a7fc8d0ae649a879c68310137e474aa0d349c552"} Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.951250 4703 scope.go:117] "RemoveContainer" containerID="7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2" Feb 02 13:15:24 crc kubenswrapper[4703]: E0202 13:15:24.957465 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2\": container with ID starting with 7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2 not found: ID does not exist" containerID="7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.957518 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2"} err="failed to get container status \"7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2\": rpc error: code = NotFound desc = could not find container \"7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2\": container with ID starting with 7dbc4177acb09576fc00d6a462b9966bec59df327c88601fc047dc9ca4065cc2 not found: ID does not exist" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.957546 4703 scope.go:117] "RemoveContainer" containerID="051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.959145 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.959709 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-xtq5d\" (UniqueName: 
\"kubernetes.io/projected/a5197757-55f2-4c6b-96a4-f3c838e0ea9f-kube-api-access-xtq5d\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.968832 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-controller-manager-8ff54f68-8fjxm"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.972730 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/infra-operator-index-8d6lm"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.976088 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/infra-operator-index-8d6lm"] Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.976847 4703 scope.go:117] "RemoveContainer" containerID="051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b" Feb 02 13:15:24 crc kubenswrapper[4703]: E0202 13:15:24.977249 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b\": container with ID starting with 051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b not found: ID does not exist" containerID="051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b" Feb 02 13:15:24 crc kubenswrapper[4703]: I0202 13:15:24.977352 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b"} err="failed to get container status \"051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b\": rpc error: code = NotFound desc = could not find container \"051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b\": container with ID starting with 051f4f1c8b124c7101e7c7d74e1b7d8d16dae53a1a699faaa2c948fb14a6f62b not found: ID does not exist" Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.807451 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"] Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.807746 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" podUID="92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" containerName="manager" containerID="cri-o://7bc9aad6c07bba85aaf138f1a45e4c3cdf066a3673955ba9ad54df25d07fc5e5" gracePeriod=10 Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.935257 4703 generic.go:334] "Generic (PLEG): container finished" podID="92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" containerID="7bc9aad6c07bba85aaf138f1a45e4c3cdf066a3673955ba9ad54df25d07fc5e5" exitCode=0 Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.942878 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="1a37f2c9-188b-4c10-ac43-035262781444" path="/var/lib/kubelet/pods/1a37f2c9-188b-4c10-ac43-035262781444/volumes" Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.943436 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="2a49a90c-5c4a-4b7f-b478-6434706fa241" path="/var/lib/kubelet/pods/2a49a90c-5c4a-4b7f-b478-6434706fa241/volumes" Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.944311 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a5197757-55f2-4c6b-96a4-f3c838e0ea9f" path="/var/lib/kubelet/pods/a5197757-55f2-4c6b-96a4-f3c838e0ea9f/volumes" Feb 02 13:15:25 crc kubenswrapper[4703]: I0202 13:15:25.944669 4703 
kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" event={"ID":"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa","Type":"ContainerDied","Data":"7bc9aad6c07bba85aaf138f1a45e4c3cdf066a3673955ba9ad54df25d07fc5e5"} Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.011357 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-6p6h6"] Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.011581 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openstack-operators/mariadb-operator-index-6p6h6" podUID="ad2463d0-0110-4de5-8371-206e16e285a4" containerName="registry-server" containerID="cri-o://bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8" gracePeriod=30 Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.054489 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"] Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.058831 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/f5f7435db1a968bc2e4b919cf4f5a8f6719d9ac995e6b095f5b2e84f4085dg5"] Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.222470 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.290259 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-hjtf8\" (UniqueName: \"kubernetes.io/projected/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-kube-api-access-hjtf8\") pod \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.290325 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-webhook-cert\") pod \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.295807 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-webhook-cert" (OuterVolumeSpecName: "webhook-cert") pod "92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" (UID: "92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa"). InnerVolumeSpecName "webhook-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.297946 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-kube-api-access-hjtf8" (OuterVolumeSpecName: "kube-api-access-hjtf8") pod "92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" (UID: "92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa"). InnerVolumeSpecName "kube-api-access-hjtf8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.391189 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-apiservice-cert\") pod \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\" (UID: \"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa\") " Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.391554 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-hjtf8\" (UniqueName: \"kubernetes.io/projected/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-kube-api-access-hjtf8\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.391579 4703 reconciler_common.go:293] "Volume detached for volume \"webhook-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-webhook-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.395828 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-apiservice-cert" (OuterVolumeSpecName: "apiservice-cert") pod "92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" (UID: "92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa"). InnerVolumeSpecName "apiservice-cert". PluginName "kubernetes.io/secret", VolumeGidValue "" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.419917 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-index-6p6h6" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.492306 4703 reconciler_common.go:293] "Volume detached for volume \"apiservice-cert\" (UniqueName: \"kubernetes.io/secret/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa-apiservice-cert\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.592912 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-99kq2\" (UniqueName: \"kubernetes.io/projected/ad2463d0-0110-4de5-8371-206e16e285a4-kube-api-access-99kq2\") pod \"ad2463d0-0110-4de5-8371-206e16e285a4\" (UID: \"ad2463d0-0110-4de5-8371-206e16e285a4\") " Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.603424 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/ad2463d0-0110-4de5-8371-206e16e285a4-kube-api-access-99kq2" (OuterVolumeSpecName: "kube-api-access-99kq2") pod "ad2463d0-0110-4de5-8371-206e16e285a4" (UID: "ad2463d0-0110-4de5-8371-206e16e285a4"). InnerVolumeSpecName "kube-api-access-99kq2". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.695461 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-99kq2\" (UniqueName: \"kubernetes.io/projected/ad2463d0-0110-4de5-8371-206e16e285a4-kube-api-access-99kq2\") on node \"crc\" DevicePath \"\"" Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.950678 4703 generic.go:334] "Generic (PLEG): container finished" podID="ad2463d0-0110-4de5-8371-206e16e285a4" containerID="bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8" exitCode=0 Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.950738 4703 util.go:48] "No ready sandbox for pod can be found. 
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.950754 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6p6h6" event={"ID":"ad2463d0-0110-4de5-8371-206e16e285a4","Type":"ContainerDied","Data":"bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8"}
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.951115 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-index-6p6h6" event={"ID":"ad2463d0-0110-4de5-8371-206e16e285a4","Type":"ContainerDied","Data":"23faca83b42d81364765217fdf7c03b68db272da10b47dea987e2a4c8b12993d"}
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.951136 4703 scope.go:117] "RemoveContainer" containerID="bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8"
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.953046 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb" event={"ID":"92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa","Type":"ContainerDied","Data":"5ee66274935d1b19becd49a716de690446ed13b412c332849c7ba9c7ada8ac6f"}
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.953078 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.969614 4703 scope.go:117] "RemoveContainer" containerID="bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8"
Feb 02 13:15:26 crc kubenswrapper[4703]: E0202 13:15:26.971361 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8\": container with ID starting with bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8 not found: ID does not exist" containerID="bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8"
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.971405 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8"} err="failed to get container status \"bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8\": rpc error: code = NotFound desc = could not find container \"bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8\": container with ID starting with bd2aaf803d6f96d51667a467e74a713062dc6a2a35e2bbcdda2a7d4067065ed8 not found: ID does not exist"
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.971430 4703 scope.go:117] "RemoveContainer" containerID="7bc9aad6c07bba85aaf138f1a45e4c3cdf066a3673955ba9ad54df25d07fc5e5"
Feb 02 13:15:26 crc kubenswrapper[4703]: I0202 13:15:26.991342 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"]
Feb 02 13:15:27 crc kubenswrapper[4703]: I0202 13:15:27.001289 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-controller-manager-fb6479fb9-74xhb"]
Feb 02 13:15:27 crc kubenswrapper[4703]: I0202 13:15:27.005441 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openstack-operators/mariadb-operator-index-6p6h6"]
Feb 02 13:15:27 crc kubenswrapper[4703]: I0202 13:15:27.011988 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openstack-operators/mariadb-operator-index-6p6h6"]
Feb 02 13:15:27 crc kubenswrapper[4703]: I0202 13:15:27.941824 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" path="/var/lib/kubelet/pods/92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa/volumes"
Feb 02 13:15:27 crc kubenswrapper[4703]: I0202 13:15:27.942394 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="ad2463d0-0110-4de5-8371-206e16e285a4" path="/var/lib/kubelet/pods/ad2463d0-0110-4de5-8371-206e16e285a4/volumes"
Feb 02 13:15:27 crc kubenswrapper[4703]: I0202 13:15:27.942994 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="d12aa798-a139-47d8-b857-1061cb464090" path="/var/lib/kubelet/pods/d12aa798-a139-47d8-b857-1061cb464090/volumes"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.486810 4703 scope.go:117] "RemoveContainer" containerID="74d87d84d3fac3cca2a111e73b93efd31c06226dd3c7e9a936fbf01e79dde69e"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.513083 4703 scope.go:117] "RemoveContainer" containerID="718471428c2a599fb59b394bd589db03c6f66227fda1c7971ddabf0389e19002"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.535259 4703 scope.go:117] "RemoveContainer" containerID="2e3b426cec04bb0a659fa3447e60c6f19d3e78cb10b979be86c3697a2c9dbdf5"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.556311 4703 scope.go:117] "RemoveContainer" containerID="4a5f20bbeae70aac00cfed9900111ded3e3ff5a9b7e7c4a6244c74d285d769e4"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.580923 4703 scope.go:117] "RemoveContainer" containerID="01fce3c0cf9338fd2d77e1883768042f963b508cd429449d46f5cf89f2eb117c"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.604155 4703 scope.go:117] "RemoveContainer" containerID="c0daaada96fa8c5e12d5b0d75b8fa3015d9cdbc7a659a1b5a7ef7237217ff9df"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.634455 4703 scope.go:117] "RemoveContainer" containerID="161766a10b1b949b3779045299836d9d4d2dfef5d890922e220b2fdcd4628ac8"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.655396 4703 scope.go:117] "RemoveContainer" containerID="ae8533f45ccc1a0d8810e173491a5ef92cb921cc5cb9b4d32d969268d7337415"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.670357 4703 scope.go:117] "RemoveContainer" containerID="a7cf33914bb0d202b3380fff8479205963f561a34fcce476683b7bd4620ec953"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.687333 4703 scope.go:117] "RemoveContainer" containerID="c43246162d62b06d57f0df3d50eb625a27fa35ba826d3caa850bfa319b3dee0e"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.702562 4703 scope.go:117] "RemoveContainer" containerID="9a453d5555c17faa4dda9b09d374ce9c18449d8a777be0db00b321b90d77356e"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.716388 4703 scope.go:117] "RemoveContainer" containerID="9114e1c12592b255afe4208fc64751a92b2f3e6c92ac430712712795fcac4d4d"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.730343 4703 scope.go:117] "RemoveContainer" containerID="23076747b5aa03926b67b5ca04faa7ac461f0a81964e9f049ea029a5febf7b60"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.749331 4703 scope.go:117] "RemoveContainer" containerID="9d602e76c9ca247363d0d2fc25b34c3e39ebb551fbcd3dee8aff89a61a915ade"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.767011 4703 scope.go:117] "RemoveContainer" containerID="e0d6f8eab6a1246f8a6b80e3d1827552b9981ac99ca9e492c4778f60782ce0ab"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.782120 4703 scope.go:117] "RemoveContainer" containerID="6f07c489b72e0860db3df4679565d0a777680586ad68b79b9d66aebf483023e3"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.796129 4703 scope.go:117] "RemoveContainer" containerID="393e4c516e9f2d3d8721685db60f695da8805e271d2b3f27ef4b14b8d1ac29c8"
Feb 02 13:15:38 crc kubenswrapper[4703]: I0202 13:15:38.823859 4703 scope.go:117] "RemoveContainer" containerID="d3c04ad42a5a136a93926e54837b8bb20dc899fbfcc2f129a64a4b7b8eadaa49"
Feb 02 13:15:45 crc kubenswrapper[4703]: I0202 13:15:45.985354 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body=
Feb 02 13:15:45 crc kubenswrapper[4703]: I0202 13:15:45.986052 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 13:15:45 crc kubenswrapper[4703]: I0202 13:15:45.986113 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs"
Feb 02 13:15:45 crc kubenswrapper[4703]: I0202 13:15:45.986928 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"2d3f73bdd3e90d2ab94249da72d0912b4673b933b3dc1372b3fe24922995e76a"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 13:15:45 crc kubenswrapper[4703]: I0202 13:15:45.987018 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://2d3f73bdd3e90d2ab94249da72d0912b4673b933b3dc1372b3fe24922995e76a" gracePeriod=600
Feb 02 13:15:47 crc kubenswrapper[4703]: I0202 13:15:47.098352 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="2d3f73bdd3e90d2ab94249da72d0912b4673b933b3dc1372b3fe24922995e76a" exitCode=0
Feb 02 13:15:47 crc kubenswrapper[4703]: I0202 13:15:47.098382 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"2d3f73bdd3e90d2ab94249da72d0912b4673b933b3dc1372b3fe24922995e76a"}
Feb 02 13:15:47 crc kubenswrapper[4703]: I0202 13:15:47.098929 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"}
Feb 02 13:15:47 crc kubenswrapper[4703]: I0202 13:15:47.098954 4703 scope.go:117] "RemoveContainer" containerID="75cfda81cf632883297446295adc654e01a62e4b398c4d8ee1de01c8cbb3f5e2"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349048 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-tpk2h/must-gather-4258r"]
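The machine-config-daemon restart above is driven by an HTTP liveness probe: the kubelet GETs http://127.0.0.1:8798/health, a refused connection counts as a failure, and once enough consecutive failures accumulate the container is killed with its termination grace period (600s here) and restarted. A minimal sketch of such a probe loop; the endpoint comes from the log, while the loop itself and the failureThreshold of 3 (the Kubernetes default, not confirmed for this pod) are illustrative:

package main

import (
	"fmt"
	"net/http"
	"time"
)

// probeOnce performs one HTTP liveness check; any transport error
// (e.g. "connect: connection refused") or non-2xx status is a failure.
func probeOnce(url string, timeout time.Duration) error {
	client := &http.Client{Timeout: timeout}
	resp, err := client.Get(url)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode < 200 || resp.StatusCode >= 300 {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	const failureThreshold = 3 // illustrative; restart after this many consecutive failures
	failures := 0
	for failures < failureThreshold {
		if err := probeOnce("http://127.0.0.1:8798/health", time.Second); err != nil {
			failures++
			fmt.Println("probe failed:", err)
			continue
		}
		failures = 0 // a success resets the counter
	}
	fmt.Println("liveness threshold crossed: container would be killed and restarted")
}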
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349379 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3b5301e6-77c6-4fd1-a97c-8ec7be30d794" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349396 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3b5301e6-77c6-4fd1-a97c-8ec7be30d794" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349412 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="01565f9f-8074-451e-9ebd-1b94124e364d" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349421 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="01565f9f-8074-451e-9ebd-1b94124e364d" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349434 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerName="mariadb-account-delete"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349443 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4f35ec6a-f298-4049-a165-04dbcd1ce6b7" containerName="mariadb-account-delete"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349461 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349469 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349479 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="1a37f2c9-188b-4c10-ac43-035262781444" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349487 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="1a37f2c9-188b-4c10-ac43-035262781444" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349502 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4460b73f-1e46-424f-896a-64e152c5976c" containerName="ceph"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349511 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4460b73f-1e46-424f-896a-64e152c5976c" containerName="ceph"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349521 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="86515156-fe01-412d-a10d-16ba26cfb8f8" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349529 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="86515156-fe01-412d-a10d-16ba26cfb8f8" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349537 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" containerName="collect-profiles"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349546 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" containerName="collect-profiles"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349561 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6a995ed1-b4b5-43d3-91cf-2f461916e342" containerName="manila-service-cleanup-n5b5h655"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349569 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6a995ed1-b4b5-43d3-91cf-2f461916e342" containerName="manila-service-cleanup-n5b5h655"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349585 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a5197757-55f2-4c6b-96a4-f3c838e0ea9f" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349595 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a5197757-55f2-4c6b-96a4-f3c838e0ea9f" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349615 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="f3e89261-c5b4-429d-b84e-4a89fa66b98b" containerName="operator"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349624 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="f3e89261-c5b4-429d-b84e-4a89fa66b98b" containerName="operator"
Feb 02 13:15:48 crc kubenswrapper[4703]: E0202 13:15:48.349636 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="ad2463d0-0110-4de5-8371-206e16e285a4" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349644 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="ad2463d0-0110-4de5-8371-206e16e285a4" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349772 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="92e67a2f-4d4f-4d5d-a80a-8922aab2e8aa" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349786 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="86515156-fe01-412d-a10d-16ba26cfb8f8" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349799 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="6a995ed1-b4b5-43d3-91cf-2f461916e342" containerName="manila-service-cleanup-n5b5h655"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349810 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="f3e89261-c5b4-429d-b84e-4a89fa66b98b" containerName="operator"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349820 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="ad2463d0-0110-4de5-8371-206e16e285a4" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349833 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="3b5301e6-77c6-4fd1-a97c-8ec7be30d794" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349848 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="eaf0ee9c-5c07-4bb7-87ef-6a838afe8ccb" containerName="collect-profiles"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349859 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="4460b73f-1e46-424f-896a-64e152c5976c" containerName="ceph"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349872 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a5197757-55f2-4c6b-96a4-f3c838e0ea9f" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349882 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="1a37f2c9-188b-4c10-ac43-035262781444" containerName="manager"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.349892 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="01565f9f-8074-451e-9ebd-1b94124e364d" containerName="registry-server"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.350648 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tpk2h/must-gather-4258r"
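This cpu_manager / state_mem / memory_manager burst is housekeeping triggered when the new pod is admitted: resource-manager state keyed by (podUID, containerName) is scanned, and entries whose pods are gone are dropped before the new pod's containers are accounted. A sketch of that reconcile-by-key pattern (all types and names here are mine, not kubelet's):

package main

import "fmt"

type key struct{ podUID, container string }

// removeStaleState drops assignments for containers whose pod is no
// longer in the active set, mirroring the RemoveStaleState log lines.
func removeStaleState(assignments map[key]string, activePods map[string]bool) {
	for k := range assignments { // deleting during range is safe in Go
		if !activePods[k.podUID] {
			fmt.Printf("RemoveStaleState: removing container podUID=%q containerName=%q\n",
				k.podUID, k.container)
			delete(assignments, k)
		}
	}
}

func main() {
	assignments := map[key]string{
		{"ad2463d0", "registry-server"}: "cpuset 0-3", // pod already deleted
		{"237ae792", "gather"}:          "cpuset 0-3", // pod still active
	}
	active := map[string]bool{"237ae792": true}
	removeStaleState(assignments, active)
	fmt.Println("remaining assignments:", len(assignments))
}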
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.353441 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tpk2h"/"openshift-service-ca.crt"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.353515 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-tpk2h"/"kube-root-ca.crt"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.364930 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tpk2h/must-gather-4258r"]
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.489393 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/237ae792-b6da-44bd-8618-7e84e3778277-must-gather-output\") pod \"must-gather-4258r\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.489679 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-5nvdv\" (UniqueName: \"kubernetes.io/projected/237ae792-b6da-44bd-8618-7e84e3778277-kube-api-access-5nvdv\") pod \"must-gather-4258r\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.590830 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-5nvdv\" (UniqueName: \"kubernetes.io/projected/237ae792-b6da-44bd-8618-7e84e3778277-kube-api-access-5nvdv\") pod \"must-gather-4258r\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.590954 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/237ae792-b6da-44bd-8618-7e84e3778277-must-gather-output\") pod \"must-gather-4258r\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.591433 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/237ae792-b6da-44bd-8618-7e84e3778277-must-gather-output\") pod \"must-gather-4258r\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.612929 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-5nvdv\" (UniqueName: \"kubernetes.io/projected/237ae792-b6da-44bd-8618-7e84e3778277-kube-api-access-5nvdv\") pod \"must-gather-4258r\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.670138 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-must-gather-tpk2h/must-gather-4258r"
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.856509 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-tpk2h/must-gather-4258r"]
Feb 02 13:15:48 crc kubenswrapper[4703]: W0202 13:15:48.870111 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-pod237ae792_b6da_44bd_8618_7e84e3778277.slice/crio-510de6d9a2301503ba49efa5aefbc2567b2515518a1954dc7e396dcf8cd6c359 WatchSource:0}: Error finding container 510de6d9a2301503ba49efa5aefbc2567b2515518a1954dc7e396dcf8cd6c359: Status 404 returned error can't find the container with id 510de6d9a2301503ba49efa5aefbc2567b2515518a1954dc7e396dcf8cd6c359
Feb 02 13:15:48 crc kubenswrapper[4703]: I0202 13:15:48.872694 4703 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider
Feb 02 13:15:49 crc kubenswrapper[4703]: I0202 13:15:49.114705 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tpk2h/must-gather-4258r" event={"ID":"237ae792-b6da-44bd-8618-7e84e3778277","Type":"ContainerStarted","Data":"510de6d9a2301503ba49efa5aefbc2567b2515518a1954dc7e396dcf8cd6c359"}
Feb 02 13:15:53 crc kubenswrapper[4703]: I0202 13:15:53.138878 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tpk2h/must-gather-4258r" event={"ID":"237ae792-b6da-44bd-8618-7e84e3778277","Type":"ContainerStarted","Data":"6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319"}
Feb 02 13:15:53 crc kubenswrapper[4703]: I0202 13:15:53.139619 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tpk2h/must-gather-4258r" event={"ID":"237ae792-b6da-44bd-8618-7e84e3778277","Type":"ContainerStarted","Data":"b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa"}
Feb 02 13:15:53 crc kubenswrapper[4703]: I0202 13:15:53.156353 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-must-gather-tpk2h/must-gather-4258r" podStartSLOduration=1.822495376 podStartE2EDuration="5.156334561s" podCreationTimestamp="2026-02-02 13:15:48 +0000 UTC" firstStartedPulling="2026-02-02 13:15:48.872653934 +0000 UTC m=+1475.887861468" lastFinishedPulling="2026-02-02 13:15:52.206493119 +0000 UTC m=+1479.221700653" observedRunningTime="2026-02-02 13:15:53.152792321 +0000 UTC m=+1480.167999855" watchObservedRunningTime="2026-02-02 13:15:53.156334561 +0000 UTC m=+1480.171542095"
Feb 02 13:16:38 crc kubenswrapper[4703]: I0202 13:16:38.533448 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-z7cwj_71a6d661-54a6-430a-902b-656503b8d12a/control-plane-machine-set-operator/0.log"
Feb 02 13:16:38 crc kubenswrapper[4703]: I0202 13:16:38.706981 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bsrg2_335fb805-5368-4ab2-bdb6-d642aeb11902/kube-rbac-proxy/0.log"
Feb 02 13:16:38 crc kubenswrapper[4703]: I0202 13:16:38.707649 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bsrg2_335fb805-5368-4ab2-bdb6-d642aeb11902/machine-api-operator/0.log"
Feb 02 13:16:39 crc kubenswrapper[4703]: I0202 13:16:39.132577 4703 scope.go:117] "RemoveContainer" containerID="19012193497d1a378e86e5ef71271fc57bc9283da76bd10b8973562fdc70f5a9"
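The pod_startup_latency_tracker line above is worth decoding, and its numbers are internally consistent: podStartE2EDuration is observedRunningTime minus podCreationTimestamp (13:15:53.156… − 13:15:48 ≈ 5.156s), and podStartSLOduration subtracts the image-pull window (lastFinishedPulling − firstStartedPulling ≈ 3.334s), leaving 5.156334561 − 3.333839185 = 1.822495376. A tiny sketch checking that arithmetic with the monotonic m=+… offsets the tracker also prints:

package main

import "fmt"

func main() {
	// Monotonic offsets (the m=+... values) copied from the tracker line.
	const (
		firstStartedPulling = 1475.887861468
		lastFinishedPulling = 1479.221700653
	)
	// observedRunningTime - podCreationTimestamp, as reported in the log.
	const e2e = 5.156334561

	pull := lastFinishedPulling - firstStartedPulling
	slo := e2e - pull
	fmt.Printf("image pull window:   ~%.9fs\n", pull) // ≈ 3.333839185
	fmt.Printf("podStartSLOduration: ~%.9fs\n", slo)  // ≈ 1.822495376, matching the log
}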
Feb 02 13:16:39 crc kubenswrapper[4703]: I0202 13:16:39.162467 4703 scope.go:117] "RemoveContainer" containerID="66e0f2dfb7fd00f8b497a45aa0a06bbe40c47ead365098938dea9649010b885e"
Feb 02 13:16:39 crc kubenswrapper[4703]: I0202 13:16:39.180374 4703 scope.go:117] "RemoveContainer" containerID="d0fad469510bf40a0243e72c3f70291266e246d08df711d6a046f44cd914c7ca"
Feb 02 13:16:59 crc kubenswrapper[4703]: I0202 13:16:59.922661 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-pjl47"]
Feb 02 13:16:59 crc kubenswrapper[4703]: I0202 13:16:59.925361 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:16:59 crc kubenswrapper[4703]: I0202 13:16:59.944650 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjl47"]
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.010203 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-d9m7l\" (UniqueName: \"kubernetes.io/projected/b732669a-b8e3-4f0f-867c-53aca27cfb7f-kube-api-access-d9m7l\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.010283 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-utilities\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.010342 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-catalog-content\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.111896 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-d9m7l\" (UniqueName: \"kubernetes.io/projected/b732669a-b8e3-4f0f-867c-53aca27cfb7f-kube-api-access-d9m7l\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.111951 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-utilities\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.111996 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-catalog-content\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.112509 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-catalog-content\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.112654 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-utilities\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.132591 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-d9m7l\" (UniqueName: \"kubernetes.io/projected/b732669a-b8e3-4f0f-867c-53aca27cfb7f-kube-api-access-d9m7l\") pod \"redhat-marketplace-pjl47\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") " pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.251202 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.667105 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjl47"]
Feb 02 13:17:00 crc kubenswrapper[4703]: I0202 13:17:00.795779 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjl47" event={"ID":"b732669a-b8e3-4f0f-867c-53aca27cfb7f","Type":"ContainerStarted","Data":"e8dcc9832eb53599eb4fa76f84f2e8406bc24ce6cb00ef203a88ca4b3d18a1ef"}
Feb 02 13:17:01 crc kubenswrapper[4703]: I0202 13:17:01.803351 4703 generic.go:334] "Generic (PLEG): container finished" podID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerID="1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46" exitCode=0
Feb 02 13:17:01 crc kubenswrapper[4703]: I0202 13:17:01.803514 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjl47" event={"ID":"b732669a-b8e3-4f0f-867c-53aca27cfb7f","Type":"ContainerDied","Data":"1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46"}
Feb 02 13:17:02 crc kubenswrapper[4703]: I0202 13:17:02.810372 4703 generic.go:334] "Generic (PLEG): container finished" podID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerID="21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5" exitCode=0
Feb 02 13:17:02 crc kubenswrapper[4703]: I0202 13:17:02.810407 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjl47" event={"ID":"b732669a-b8e3-4f0f-867c-53aca27cfb7f","Type":"ContainerDied","Data":"21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5"}
Feb 02 13:17:03 crc kubenswrapper[4703]: I0202 13:17:03.818304 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjl47" event={"ID":"b732669a-b8e3-4f0f-867c-53aca27cfb7f","Type":"ContainerStarted","Data":"c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760"}
Feb 02 13:17:03 crc kubenswrapper[4703]: I0202 13:17:03.838494 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-pjl47" podStartSLOduration=3.315265082 podStartE2EDuration="4.838470782s" podCreationTimestamp="2026-02-02 13:16:59 +0000 UTC" firstStartedPulling="2026-02-02 13:17:01.805494664 +0000 UTC m=+1548.820702198" lastFinishedPulling="2026-02-02 13:17:03.328700364 +0000 UTC m=+1550.343907898" observedRunningTime="2026-02-02 13:17:03.833570712 +0000 UTC m=+1550.848778246" watchObservedRunningTime="2026-02-02 13:17:03.838470782 +0000 UTC m=+1550.853678316"
Feb 02 13:17:04 crc kubenswrapper[4703]: I0202 13:17:04.883874 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-wxlfd_5fc3b818-d481-4eb8-b065-b03384d4f164/kube-rbac-proxy/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.566388 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.566443 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.571054 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.660944 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-wxlfd_5fc3b818-d481-4eb8-b065-b03384d4f164/controller/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.748032 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.787502 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log"
Feb 02 13:17:05 crc kubenswrapper[4703]: I0202 13:17:05.978454 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.000518 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.000550 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.017264 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.193411 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.239091 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/controller/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.250719 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.253069 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.424103 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/frr-metrics/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.444498 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/kube-rbac-proxy/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.476536 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/kube-rbac-proxy-frr/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.611853 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/reloader/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.664159 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-dfdkw_1ff8e6a8-2795-4cb8-9550-75b3129ef6b4/frr-k8s-webhook-server/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.851161 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/frr/0.log"
Feb 02 13:17:06 crc kubenswrapper[4703]: I0202 13:17:06.891069 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-54d9d5b5d6-wdjdr_8d876cf0-0b55-4941-9b60-7258381875ec/manager/0.log"
Feb 02 13:17:07 crc kubenswrapper[4703]: I0202 13:17:07.010052 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-65699778c9-782nc_9c3d0481-1b4c-4d06-8605-bb6079b162b8/webhook-server/0.log"
Feb 02 13:17:07 crc kubenswrapper[4703]: I0202 13:17:07.045383 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zgzbl_622b2024-9b5b-4964-aed7-52b15352cd1d/kube-rbac-proxy/0.log"
Feb 02 13:17:07 crc kubenswrapper[4703]: I0202 13:17:07.156779 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zgzbl_622b2024-9b5b-4964-aed7-52b15352cd1d/speaker/0.log"
Feb 02 13:17:10 crc kubenswrapper[4703]: I0202 13:17:10.251559 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:10 crc kubenswrapper[4703]: I0202 13:17:10.251821 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:10 crc kubenswrapper[4703]: I0202 13:17:10.293886 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:10 crc kubenswrapper[4703]: I0202 13:17:10.897133 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:10 crc kubenswrapper[4703]: I0202 13:17:10.946705 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjl47"]
Feb 02 13:17:12 crc kubenswrapper[4703]: I0202 13:17:12.866688 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-marketplace-pjl47" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="registry-server" containerID="cri-o://c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760" gracePeriod=2
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.222585 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.354095 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-utilities\") pod \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") "
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.354210 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-d9m7l\" (UniqueName: \"kubernetes.io/projected/b732669a-b8e3-4f0f-867c-53aca27cfb7f-kube-api-access-d9m7l\") pod \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") "
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.354355 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-catalog-content\") pod \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\" (UID: \"b732669a-b8e3-4f0f-867c-53aca27cfb7f\") "
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.355619 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-utilities" (OuterVolumeSpecName: "utilities") pod "b732669a-b8e3-4f0f-867c-53aca27cfb7f" (UID: "b732669a-b8e3-4f0f-867c-53aca27cfb7f"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.365167 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/b732669a-b8e3-4f0f-867c-53aca27cfb7f-kube-api-access-d9m7l" (OuterVolumeSpecName: "kube-api-access-d9m7l") pod "b732669a-b8e3-4f0f-867c-53aca27cfb7f" (UID: "b732669a-b8e3-4f0f-867c-53aca27cfb7f"). InnerVolumeSpecName "kube-api-access-d9m7l". PluginName "kubernetes.io/projected", VolumeGidValue ""
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.383483 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "b732669a-b8e3-4f0f-867c-53aca27cfb7f" (UID: "b732669a-b8e3-4f0f-867c-53aca27cfb7f"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue ""
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.456186 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-catalog-content\") on node \"crc\" DevicePath \"\""
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.456229 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b732669a-b8e3-4f0f-867c-53aca27cfb7f-utilities\") on node \"crc\" DevicePath \"\""
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.456243 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-d9m7l\" (UniqueName: \"kubernetes.io/projected/b732669a-b8e3-4f0f-867c-53aca27cfb7f-kube-api-access-d9m7l\") on node \"crc\" DevicePath \"\""
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.873519 4703 generic.go:334] "Generic (PLEG): container finished" podID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerID="c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760" exitCode=0
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.873557 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjl47" event={"ID":"b732669a-b8e3-4f0f-867c-53aca27cfb7f","Type":"ContainerDied","Data":"c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760"}
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.873581 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-pjl47" event={"ID":"b732669a-b8e3-4f0f-867c-53aca27cfb7f","Type":"ContainerDied","Data":"e8dcc9832eb53599eb4fa76f84f2e8406bc24ce6cb00ef203a88ca4b3d18a1ef"}
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.873598 4703 scope.go:117] "RemoveContainer" containerID="c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.873696 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-pjl47"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.894480 4703 scope.go:117] "RemoveContainer" containerID="21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.900299 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjl47"]
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.904282 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-pjl47"]
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.930815 4703 scope.go:117] "RemoveContainer" containerID="1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.943057 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" path="/var/lib/kubelet/pods/b732669a-b8e3-4f0f-867c-53aca27cfb7f/volumes"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.948406 4703 scope.go:117] "RemoveContainer" containerID="c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760"
Feb 02 13:17:13 crc kubenswrapper[4703]: E0202 13:17:13.948761 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760\": container with ID starting with c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760 not found: ID does not exist" containerID="c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.948796 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760"} err="failed to get container status \"c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760\": rpc error: code = NotFound desc = could not find container \"c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760\": container with ID starting with c5a4379a36eb7aaff442745e209550dc938cffe31c3857d5087026d852af9760 not found: ID does not exist"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.948815 4703 scope.go:117] "RemoveContainer" containerID="21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5"
Feb 02 13:17:13 crc kubenswrapper[4703]: E0202 13:17:13.949177 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5\": container with ID starting with 21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5 not found: ID does not exist" containerID="21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.949221 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5"} err="failed to get container status \"21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5\": rpc error: code = NotFound desc = could not find container \"21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5\": container with ID starting with 21a0fbb394abccaf5455f3c8dd37d7731dac114af6c255cc7d7b833e789b79b5 not found: ID does not exist"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.949255 4703 scope.go:117] "RemoveContainer" containerID="1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46"
Feb 02 13:17:13 crc kubenswrapper[4703]: E0202 13:17:13.949594 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46\": container with ID starting with 1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46 not found: ID does not exist" containerID="1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46"
Feb 02 13:17:13 crc kubenswrapper[4703]: I0202 13:17:13.949618 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46"} err="failed to get container status \"1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46\": rpc error: code = NotFound desc = could not find container \"1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46\": container with ID starting with 1de111cea05a7e66dfbe93c33e49da284796434da1871ae438b139ba25c24a46 not found: ID does not exist"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.783853 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-operators-d4wwh"]
Feb 02 13:17:25 crc kubenswrapper[4703]: E0202 13:17:25.784644 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="extract-content"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.784659 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="extract-content"
Feb 02 13:17:25 crc kubenswrapper[4703]: E0202 13:17:25.784669 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="registry-server"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.784677 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="registry-server"
Feb 02 13:17:25 crc kubenswrapper[4703]: E0202 13:17:25.784693 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="extract-utilities"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.784702 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="extract-utilities"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.784838 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="b732669a-b8e3-4f0f-867c-53aca27cfb7f" containerName="registry-server"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.785718 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.794896 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d4wwh"]
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.929921 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-catalog-content\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.929986 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-utilities\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:25 crc kubenswrapper[4703]: I0202 13:17:25.930037 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-ntwj8\" (UniqueName: \"kubernetes.io/projected/3eb48324-9478-41ed-958b-23df01c4c487-kube-api-access-ntwj8\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.031542 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-ntwj8\" (UniqueName: \"kubernetes.io/projected/3eb48324-9478-41ed-958b-23df01c4c487-kube-api-access-ntwj8\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.031658 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-catalog-content\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.031729 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-utilities\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.032396 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-catalog-content\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.032430 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-utilities\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.060166 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-ntwj8\" (UniqueName: \"kubernetes.io/projected/3eb48324-9478-41ed-958b-23df01c4c487-kube-api-access-ntwj8\") pod \"redhat-operators-d4wwh\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.133901 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.562754 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-operators-d4wwh"]
Feb 02 13:17:26 crc kubenswrapper[4703]: E0202 13:17:26.764118 4703 cadvisor_stats_provider.go:516] "Partial failure issuing cadvisor.ContainerInfoV2" err="partial failures: [\"/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-pod3eb48324_9478_41ed_958b_23df01c4c487.slice/crio-conmon-6a25f6a1776d213850c8b1b21c1596100ecd3708cb6a2a4314c97c75c42d3c95.scope\": RecentStats: unable to find data in memory cache]"
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.949987 4703 generic.go:334] "Generic (PLEG): container finished" podID="3eb48324-9478-41ed-958b-23df01c4c487" containerID="6a25f6a1776d213850c8b1b21c1596100ecd3708cb6a2a4314c97c75c42d3c95" exitCode=0
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.950113 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4wwh" event={"ID":"3eb48324-9478-41ed-958b-23df01c4c487","Type":"ContainerDied","Data":"6a25f6a1776d213850c8b1b21c1596100ecd3708cb6a2a4314c97c75c42d3c95"}
Feb 02 13:17:26 crc kubenswrapper[4703]: I0202 13:17:26.950264 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4wwh" event={"ID":"3eb48324-9478-41ed-958b-23df01c4c487","Type":"ContainerStarted","Data":"bf4344cbbcbd3ea6a93e3be1e895fa468e7f46ee1fcdd34755c88a68739ef039"}
Feb 02 13:17:28 crc kubenswrapper[4703]: I0202 13:17:28.962344 4703 generic.go:334] "Generic (PLEG): container finished" podID="3eb48324-9478-41ed-958b-23df01c4c487" containerID="3cfd9d812772c886357a80c2fadd74c8f40a6ca6af8fc9d2e9330b9dde42dae7" exitCode=0
Feb 02 13:17:28 crc kubenswrapper[4703]: I0202 13:17:28.962391 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4wwh" event={"ID":"3eb48324-9478-41ed-958b-23df01c4c487","Type":"ContainerDied","Data":"3cfd9d812772c886357a80c2fadd74c8f40a6ca6af8fc9d2e9330b9dde42dae7"}
Feb 02 13:17:29 crc kubenswrapper[4703]: I0202 13:17:29.969824 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4wwh" event={"ID":"3eb48324-9478-41ed-958b-23df01c4c487","Type":"ContainerStarted","Data":"171103e54cc9fe73b78c47b1157ca366a05eb740759ce1b983dfd039ce7cae1c"}
Feb 02 13:17:29 crc kubenswrapper[4703]: I0202 13:17:29.991469 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-operators-d4wwh" podStartSLOduration=2.304555057 podStartE2EDuration="4.991447291s" podCreationTimestamp="2026-02-02 13:17:25 +0000 UTC" firstStartedPulling="2026-02-02 13:17:26.951472155 +0000 UTC m=+1573.966679689" lastFinishedPulling="2026-02-02 13:17:29.638364389 +0000 UTC m=+1576.653571923" observedRunningTime="2026-02-02 13:17:29.988399844 +0000 UTC m=+1577.003607378" watchObservedRunningTime="2026-02-02 13:17:29.991447291 +0000 UTC m=+1577.006654825"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.176905 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/util/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.312241 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/util/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.382081 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/pull/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.424145 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/pull/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.629134 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/extract/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.740376 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/pull/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.775427 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/util/0.log"
Feb 02 13:17:30 crc kubenswrapper[4703]: I0202 13:17:30.890987 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/extract-utilities/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.049176 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/extract-utilities/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.073896 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/extract-content/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.084376 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/extract-content/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.242061 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/extract-utilities/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.279867 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/extract-content/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.443842 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-utilities/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.669358 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-mfzvq_4c522108-2465-4905-9703-5dfd173bafdb/registry-server/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.686609 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-utilities/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.801585 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-content/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.803314 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-content/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.886634 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-content/0.log"
Feb 02 13:17:31 crc kubenswrapper[4703]: I0202 13:17:31.893942 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-utilities/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.192303 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-tbv5t_71ff2212-c3fe-45e2-87ef-a4d4e19ce91d/marketplace-operator/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.219737 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-utilities/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.388593 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/registry-server/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.488877 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-utilities/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.544895 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-content/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.569989 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-content/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.800730 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-content/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.808366 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-utilities/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.894432 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/registry-server/0.log"
Feb 02 13:17:32 crc kubenswrapper[4703]: I0202 13:17:32.984767 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/extract-utilities/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.243346 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/extract-content/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.247178 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/extract-utilities/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.272674 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/extract-content/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.395724 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/extract-content/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.448142 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/extract-utilities/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.453030 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-d4wwh_3eb48324-9478-41ed-958b-23df01c4c487/registry-server/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.563329 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-utilities/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.775470 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-content/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.776319 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-content/0.log"
Feb 02 13:17:33 crc kubenswrapper[4703]: I0202 13:17:33.785236 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-utilities/0.log"
Feb 02 13:17:34 crc kubenswrapper[4703]: I0202 13:17:34.148567 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-content/0.log"
Feb 02 13:17:34 crc kubenswrapper[4703]: I0202 13:17:34.173907 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-utilities/0.log"
Feb 02 13:17:34 crc kubenswrapper[4703]: I0202 13:17:34.416200 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/registry-server/0.log"
Feb 02 13:17:36 crc kubenswrapper[4703]: I0202 13:17:36.134296 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:36 crc kubenswrapper[4703]: I0202 13:17:36.134364 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-operators-d4wwh"
Feb 02 13:17:36 crc
kubenswrapper[4703]: I0202 13:17:36.174397 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-operators-d4wwh" Feb 02 13:17:37 crc kubenswrapper[4703]: I0202 13:17:37.049337 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-operators-d4wwh" Feb 02 13:17:37 crc kubenswrapper[4703]: I0202 13:17:37.098071 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d4wwh"] Feb 02 13:17:39 crc kubenswrapper[4703]: I0202 13:17:39.019755 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/redhat-operators-d4wwh" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="registry-server" containerID="cri-o://171103e54cc9fe73b78c47b1157ca366a05eb740759ce1b983dfd039ce7cae1c" gracePeriod=2 Feb 02 13:17:39 crc kubenswrapper[4703]: I0202 13:17:39.237370 4703 scope.go:117] "RemoveContainer" containerID="bc428931cc4afb49b9715e8947ae0f071ff5fc6b16d99c9fa5a91440b2ed6440" Feb 02 13:17:39 crc kubenswrapper[4703]: I0202 13:17:39.272324 4703 scope.go:117] "RemoveContainer" containerID="2e6ca297674ed59cad2e2be0d777b7b349f8ccacd0eaf527db3ee4008e8d5a2c" Feb 02 13:17:39 crc kubenswrapper[4703]: I0202 13:17:39.299518 4703 scope.go:117] "RemoveContainer" containerID="a66d59070de56715719c365d9b133db8a9c42bf1972ab036a6a12c0491e0d614" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.559529 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-s65rg"] Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.560791 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.569536 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s65rg"] Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.632976 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kxz9g\" (UniqueName: \"kubernetes.io/projected/c0807450-8cb3-4b02-9435-33f9db39497d-kube-api-access-kxz9g\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.633047 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-utilities\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.633119 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-catalog-content\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.733777 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-utilities\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " 
pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.733852 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-catalog-content\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.733893 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kxz9g\" (UniqueName: \"kubernetes.io/projected/c0807450-8cb3-4b02-9435-33f9db39497d-kube-api-access-kxz9g\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.734799 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-utilities\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.734810 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-catalog-content\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.768432 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kxz9g\" (UniqueName: \"kubernetes.io/projected/c0807450-8cb3-4b02-9435-33f9db39497d-kube-api-access-kxz9g\") pod \"community-operators-s65rg\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:40 crc kubenswrapper[4703]: I0202 13:17:40.877104 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.061128 4703 generic.go:334] "Generic (PLEG): container finished" podID="3eb48324-9478-41ed-958b-23df01c4c487" containerID="171103e54cc9fe73b78c47b1157ca366a05eb740759ce1b983dfd039ce7cae1c" exitCode=0 Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.061165 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4wwh" event={"ID":"3eb48324-9478-41ed-958b-23df01c4c487","Type":"ContainerDied","Data":"171103e54cc9fe73b78c47b1157ca366a05eb740759ce1b983dfd039ce7cae1c"} Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.316177 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d4wwh" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.388267 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-s65rg"] Feb 02 13:17:41 crc kubenswrapper[4703]: W0202 13:17:41.395827 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-burstable.slice/kubepods-burstable-podc0807450_8cb3_4b02_9435_33f9db39497d.slice/crio-002a2d2b7c7a6c3d5a34bcb1eac3dc5b22956de55a18c8bb24d343fe6c4c73b9 WatchSource:0}: Error finding container 002a2d2b7c7a6c3d5a34bcb1eac3dc5b22956de55a18c8bb24d343fe6c4c73b9: Status 404 returned error can't find the container with id 002a2d2b7c7a6c3d5a34bcb1eac3dc5b22956de55a18c8bb24d343fe6c4c73b9 Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.442879 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-ntwj8\" (UniqueName: \"kubernetes.io/projected/3eb48324-9478-41ed-958b-23df01c4c487-kube-api-access-ntwj8\") pod \"3eb48324-9478-41ed-958b-23df01c4c487\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.442956 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-catalog-content\") pod \"3eb48324-9478-41ed-958b-23df01c4c487\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.443038 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-utilities\") pod \"3eb48324-9478-41ed-958b-23df01c4c487\" (UID: \"3eb48324-9478-41ed-958b-23df01c4c487\") " Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.443874 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-utilities" (OuterVolumeSpecName: "utilities") pod "3eb48324-9478-41ed-958b-23df01c4c487" (UID: "3eb48324-9478-41ed-958b-23df01c4c487"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.447459 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/3eb48324-9478-41ed-958b-23df01c4c487-kube-api-access-ntwj8" (OuterVolumeSpecName: "kube-api-access-ntwj8") pod "3eb48324-9478-41ed-958b-23df01c4c487" (UID: "3eb48324-9478-41ed-958b-23df01c4c487"). InnerVolumeSpecName "kube-api-access-ntwj8". 
PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.545062 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.545110 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-ntwj8\" (UniqueName: \"kubernetes.io/projected/3eb48324-9478-41ed-958b-23df01c4c487-kube-api-access-ntwj8\") on node \"crc\" DevicePath \"\"" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.550934 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "3eb48324-9478-41ed-958b-23df01c4c487" (UID: "3eb48324-9478-41ed-958b-23df01c4c487"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:17:41 crc kubenswrapper[4703]: I0202 13:17:41.646875 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/3eb48324-9478-41ed-958b-23df01c4c487-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.070453 4703 generic.go:334] "Generic (PLEG): container finished" podID="c0807450-8cb3-4b02-9435-33f9db39497d" containerID="434d7beee1d95925c43b8a8d0ce5aa2599961727d80ec80d5c982215769a38a3" exitCode=0 Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.070540 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s65rg" event={"ID":"c0807450-8cb3-4b02-9435-33f9db39497d","Type":"ContainerDied","Data":"434d7beee1d95925c43b8a8d0ce5aa2599961727d80ec80d5c982215769a38a3"} Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.070578 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s65rg" event={"ID":"c0807450-8cb3-4b02-9435-33f9db39497d","Type":"ContainerStarted","Data":"002a2d2b7c7a6c3d5a34bcb1eac3dc5b22956de55a18c8bb24d343fe6c4c73b9"} Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.073889 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-operators-d4wwh" event={"ID":"3eb48324-9478-41ed-958b-23df01c4c487","Type":"ContainerDied","Data":"bf4344cbbcbd3ea6a93e3be1e895fa468e7f46ee1fcdd34755c88a68739ef039"} Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.073927 4703 scope.go:117] "RemoveContainer" containerID="171103e54cc9fe73b78c47b1157ca366a05eb740759ce1b983dfd039ce7cae1c" Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.073981 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-operators-d4wwh" Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.090135 4703 scope.go:117] "RemoveContainer" containerID="3cfd9d812772c886357a80c2fadd74c8f40a6ca6af8fc9d2e9330b9dde42dae7" Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.116560 4703 scope.go:117] "RemoveContainer" containerID="6a25f6a1776d213850c8b1b21c1596100ecd3708cb6a2a4314c97c75c42d3c95" Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.130210 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-operators-d4wwh"] Feb 02 13:17:42 crc kubenswrapper[4703]: I0202 13:17:42.130271 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-operators-d4wwh"] Feb 02 13:17:43 crc kubenswrapper[4703]: I0202 13:17:43.082321 4703 generic.go:334] "Generic (PLEG): container finished" podID="c0807450-8cb3-4b02-9435-33f9db39497d" containerID="f58aa402e864d092e33077bc8f128f4adf231ff63f20e5384678392781ae64a8" exitCode=0 Feb 02 13:17:43 crc kubenswrapper[4703]: I0202 13:17:43.082405 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s65rg" event={"ID":"c0807450-8cb3-4b02-9435-33f9db39497d","Type":"ContainerDied","Data":"f58aa402e864d092e33077bc8f128f4adf231ff63f20e5384678392781ae64a8"} Feb 02 13:17:43 crc kubenswrapper[4703]: I0202 13:17:43.941653 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="3eb48324-9478-41ed-958b-23df01c4c487" path="/var/lib/kubelet/pods/3eb48324-9478-41ed-958b-23df01c4c487/volumes" Feb 02 13:17:44 crc kubenswrapper[4703]: I0202 13:17:44.091885 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s65rg" event={"ID":"c0807450-8cb3-4b02-9435-33f9db39497d","Type":"ContainerStarted","Data":"148513474ca73daf4a6dc8e0398f2c4d39ca22fd57a79c5422c2708924b726e5"} Feb 02 13:17:44 crc kubenswrapper[4703]: I0202 13:17:44.108534 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-s65rg" podStartSLOduration=2.676939363 podStartE2EDuration="4.108518291s" podCreationTimestamp="2026-02-02 13:17:40 +0000 UTC" firstStartedPulling="2026-02-02 13:17:42.0729297 +0000 UTC m=+1589.088137234" lastFinishedPulling="2026-02-02 13:17:43.504508628 +0000 UTC m=+1590.519716162" observedRunningTime="2026-02-02 13:17:44.106497314 +0000 UTC m=+1591.121704848" watchObservedRunningTime="2026-02-02 13:17:44.108518291 +0000 UTC m=+1591.123725825" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.665107 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/certified-operators-g77z4"] Feb 02 13:17:50 crc kubenswrapper[4703]: E0202 13:17:50.666068 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="registry-server" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.666086 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="registry-server" Feb 02 13:17:50 crc kubenswrapper[4703]: E0202 13:17:50.666105 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="extract-content" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.666113 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="extract-content" Feb 02 13:17:50 crc 
kubenswrapper[4703]: E0202 13:17:50.666124 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="extract-utilities" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.666133 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="extract-utilities" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.666256 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="3eb48324-9478-41ed-958b-23df01c4c487" containerName="registry-server" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.673429 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.700942 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g77z4"] Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.773126 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-qcss4\" (UniqueName: \"kubernetes.io/projected/b56a5aee-bc7b-4d7d-83a0-50292348f157-kube-api-access-qcss4\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.773187 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b56a5aee-bc7b-4d7d-83a0-50292348f157-utilities\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.773324 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b56a5aee-bc7b-4d7d-83a0-50292348f157-catalog-content\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.874506 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-qcss4\" (UniqueName: \"kubernetes.io/projected/b56a5aee-bc7b-4d7d-83a0-50292348f157-kube-api-access-qcss4\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.874550 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b56a5aee-bc7b-4d7d-83a0-50292348f157-utilities\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.874608 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/b56a5aee-bc7b-4d7d-83a0-50292348f157-catalog-content\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.875068 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" 
(UniqueName: \"kubernetes.io/empty-dir/b56a5aee-bc7b-4d7d-83a0-50292348f157-catalog-content\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.875136 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/b56a5aee-bc7b-4d7d-83a0-50292348f157-utilities\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.878162 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.878216 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.907449 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-qcss4\" (UniqueName: \"kubernetes.io/projected/b56a5aee-bc7b-4d7d-83a0-50292348f157-kube-api-access-qcss4\") pod \"certified-operators-g77z4\" (UID: \"b56a5aee-bc7b-4d7d-83a0-50292348f157\") " pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:50 crc kubenswrapper[4703]: I0202 13:17:50.920678 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:51 crc kubenswrapper[4703]: I0202 13:17:51.000540 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:17:51 crc kubenswrapper[4703]: I0202 13:17:51.346248 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:51 crc kubenswrapper[4703]: I0202 13:17:51.639328 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g77z4"] Feb 02 13:17:52 crc kubenswrapper[4703]: I0202 13:17:52.133742 4703 generic.go:334] "Generic (PLEG): container finished" podID="b56a5aee-bc7b-4d7d-83a0-50292348f157" containerID="02e725b0bb8f508c5351bfb51ed4b66b940e1114696e0112e4afe9b7bf27d20d" exitCode=0 Feb 02 13:17:52 crc kubenswrapper[4703]: I0202 13:17:52.133791 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g77z4" event={"ID":"b56a5aee-bc7b-4d7d-83a0-50292348f157","Type":"ContainerDied","Data":"02e725b0bb8f508c5351bfb51ed4b66b940e1114696e0112e4afe9b7bf27d20d"} Feb 02 13:17:52 crc kubenswrapper[4703]: I0202 13:17:52.133845 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g77z4" event={"ID":"b56a5aee-bc7b-4d7d-83a0-50292348f157","Type":"ContainerStarted","Data":"1593344414e3e4afacf32ba2d5289bb846376e421003ba0dd9441364f067219d"} Feb 02 13:17:53 crc kubenswrapper[4703]: I0202 13:17:53.243752 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s65rg"] Feb 02 13:17:53 crc kubenswrapper[4703]: I0202 13:17:53.244571 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-s65rg" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="registry-server" 
containerID="cri-o://148513474ca73daf4a6dc8e0398f2c4d39ca22fd57a79c5422c2708924b726e5" gracePeriod=2 Feb 02 13:17:54 crc kubenswrapper[4703]: I0202 13:17:54.149748 4703 generic.go:334] "Generic (PLEG): container finished" podID="c0807450-8cb3-4b02-9435-33f9db39497d" containerID="148513474ca73daf4a6dc8e0398f2c4d39ca22fd57a79c5422c2708924b726e5" exitCode=0 Feb 02 13:17:54 crc kubenswrapper[4703]: I0202 13:17:54.149794 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s65rg" event={"ID":"c0807450-8cb3-4b02-9435-33f9db39497d","Type":"ContainerDied","Data":"148513474ca73daf4a6dc8e0398f2c4d39ca22fd57a79c5422c2708924b726e5"} Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.757648 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.883993 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kxz9g\" (UniqueName: \"kubernetes.io/projected/c0807450-8cb3-4b02-9435-33f9db39497d-kube-api-access-kxz9g\") pod \"c0807450-8cb3-4b02-9435-33f9db39497d\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.884048 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-utilities\") pod \"c0807450-8cb3-4b02-9435-33f9db39497d\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.884210 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-catalog-content\") pod \"c0807450-8cb3-4b02-9435-33f9db39497d\" (UID: \"c0807450-8cb3-4b02-9435-33f9db39497d\") " Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.884847 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-utilities" (OuterVolumeSpecName: "utilities") pod "c0807450-8cb3-4b02-9435-33f9db39497d" (UID: "c0807450-8cb3-4b02-9435-33f9db39497d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.897461 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/c0807450-8cb3-4b02-9435-33f9db39497d-kube-api-access-kxz9g" (OuterVolumeSpecName: "kube-api-access-kxz9g") pod "c0807450-8cb3-4b02-9435-33f9db39497d" (UID: "c0807450-8cb3-4b02-9435-33f9db39497d"). InnerVolumeSpecName "kube-api-access-kxz9g". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.933163 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "c0807450-8cb3-4b02-9435-33f9db39497d" (UID: "c0807450-8cb3-4b02-9435-33f9db39497d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.985563 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.985593 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kxz9g\" (UniqueName: \"kubernetes.io/projected/c0807450-8cb3-4b02-9435-33f9db39497d-kube-api-access-kxz9g\") on node \"crc\" DevicePath \"\"" Feb 02 13:17:56 crc kubenswrapper[4703]: I0202 13:17:56.985605 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/c0807450-8cb3-4b02-9435-33f9db39497d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.169870 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-s65rg" event={"ID":"c0807450-8cb3-4b02-9435-33f9db39497d","Type":"ContainerDied","Data":"002a2d2b7c7a6c3d5a34bcb1eac3dc5b22956de55a18c8bb24d343fe6c4c73b9"} Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.169933 4703 scope.go:117] "RemoveContainer" containerID="148513474ca73daf4a6dc8e0398f2c4d39ca22fd57a79c5422c2708924b726e5" Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.170090 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-s65rg" Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.213012 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-s65rg"] Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.216651 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-s65rg"] Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.940981 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" path="/var/lib/kubelet/pods/c0807450-8cb3-4b02-9435-33f9db39497d/volumes" Feb 02 13:17:57 crc kubenswrapper[4703]: I0202 13:17:57.984903 4703 scope.go:117] "RemoveContainer" containerID="f58aa402e864d092e33077bc8f128f4adf231ff63f20e5384678392781ae64a8" Feb 02 13:17:58 crc kubenswrapper[4703]: I0202 13:17:58.058507 4703 scope.go:117] "RemoveContainer" containerID="434d7beee1d95925c43b8a8d0ce5aa2599961727d80ec80d5c982215769a38a3" Feb 02 13:17:59 crc kubenswrapper[4703]: I0202 13:17:59.186456 4703 generic.go:334] "Generic (PLEG): container finished" podID="b56a5aee-bc7b-4d7d-83a0-50292348f157" containerID="20af840ad1eb38e8fbca4733953ee63820461c864d9f729b9d84d3443ceebb14" exitCode=0 Feb 02 13:17:59 crc kubenswrapper[4703]: I0202 13:17:59.186494 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g77z4" event={"ID":"b56a5aee-bc7b-4d7d-83a0-50292348f157","Type":"ContainerDied","Data":"20af840ad1eb38e8fbca4733953ee63820461c864d9f729b9d84d3443ceebb14"} Feb 02 13:18:00 crc kubenswrapper[4703]: I0202 13:18:00.193926 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-g77z4" event={"ID":"b56a5aee-bc7b-4d7d-83a0-50292348f157","Type":"ContainerStarted","Data":"f9756d8c169e9d0c5608fb3a5f2ecb132ff28a67fbdd8cb99d475cea14c7de61"} Feb 02 13:18:01 crc kubenswrapper[4703]: I0202 13:18:01.001510 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" 
status="unhealthy" pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:18:01 crc kubenswrapper[4703]: I0202 13:18:01.001547 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:18:02 crc kubenswrapper[4703]: I0202 13:18:02.047389 4703 prober.go:107] "Probe failed" probeType="Startup" pod="openshift-marketplace/certified-operators-g77z4" podUID="b56a5aee-bc7b-4d7d-83a0-50292348f157" containerName="registry-server" probeResult="failure" output=< Feb 02 13:18:02 crc kubenswrapper[4703]: timeout: failed to connect service ":50051" within 1s Feb 02 13:18:02 crc kubenswrapper[4703]: > Feb 02 13:18:11 crc kubenswrapper[4703]: I0202 13:18:11.067653 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:18:11 crc kubenswrapper[4703]: I0202 13:18:11.092512 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/certified-operators-g77z4" podStartSLOduration=13.625306427 podStartE2EDuration="21.092492534s" podCreationTimestamp="2026-02-02 13:17:50 +0000 UTC" firstStartedPulling="2026-02-02 13:17:52.13687155 +0000 UTC m=+1599.152079084" lastFinishedPulling="2026-02-02 13:17:59.604057657 +0000 UTC m=+1606.619265191" observedRunningTime="2026-02-02 13:18:00.221493883 +0000 UTC m=+1607.236701417" watchObservedRunningTime="2026-02-02 13:18:11.092492534 +0000 UTC m=+1618.107700078" Feb 02 13:18:11 crc kubenswrapper[4703]: I0202 13:18:11.131697 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/certified-operators-g77z4" Feb 02 13:18:11 crc kubenswrapper[4703]: I0202 13:18:11.613468 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/certified-operators-g77z4"] Feb 02 13:18:11 crc kubenswrapper[4703]: I0202 13:18:11.771819 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mfzvq"] Feb 02 13:18:11 crc kubenswrapper[4703]: I0202 13:18:11.772147 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/certified-operators-mfzvq" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="registry-server" containerID="cri-o://97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c" gracePeriod=2 Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.149451 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.195771 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-catalog-content\") pod \"4c522108-2465-4905-9703-5dfd173bafdb\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.195873 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-wrjv6\" (UniqueName: \"kubernetes.io/projected/4c522108-2465-4905-9703-5dfd173bafdb-kube-api-access-wrjv6\") pod \"4c522108-2465-4905-9703-5dfd173bafdb\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.195936 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-utilities\") pod \"4c522108-2465-4905-9703-5dfd173bafdb\" (UID: \"4c522108-2465-4905-9703-5dfd173bafdb\") " Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.196840 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-utilities" (OuterVolumeSpecName: "utilities") pod "4c522108-2465-4905-9703-5dfd173bafdb" (UID: "4c522108-2465-4905-9703-5dfd173bafdb"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.201389 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/4c522108-2465-4905-9703-5dfd173bafdb-kube-api-access-wrjv6" (OuterVolumeSpecName: "kube-api-access-wrjv6") pod "4c522108-2465-4905-9703-5dfd173bafdb" (UID: "4c522108-2465-4905-9703-5dfd173bafdb"). InnerVolumeSpecName "kube-api-access-wrjv6". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.238377 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "4c522108-2465-4905-9703-5dfd173bafdb" (UID: "4c522108-2465-4905-9703-5dfd173bafdb"). InnerVolumeSpecName "catalog-content". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.274745 4703 generic.go:334] "Generic (PLEG): container finished" podID="4c522108-2465-4905-9703-5dfd173bafdb" containerID="97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c" exitCode=0 Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.275366 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/certified-operators-mfzvq" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.283399 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerDied","Data":"97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c"} Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.283443 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/certified-operators-mfzvq" event={"ID":"4c522108-2465-4905-9703-5dfd173bafdb","Type":"ContainerDied","Data":"3cbc9061bcc60d825b5054f71a3f4ea3aa0457bc6c72a067b1611b4583969d27"} Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.283463 4703 scope.go:117] "RemoveContainer" containerID="97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.296952 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-wrjv6\" (UniqueName: \"kubernetes.io/projected/4c522108-2465-4905-9703-5dfd173bafdb-kube-api-access-wrjv6\") on node \"crc\" DevicePath \"\"" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.296984 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.296998 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/4c522108-2465-4905-9703-5dfd173bafdb-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.305827 4703 scope.go:117] "RemoveContainer" containerID="efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.308327 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/certified-operators-mfzvq"] Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.311037 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/certified-operators-mfzvq"] Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.338970 4703 scope.go:117] "RemoveContainer" containerID="ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.357961 4703 scope.go:117] "RemoveContainer" containerID="97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c" Feb 02 13:18:12 crc kubenswrapper[4703]: E0202 13:18:12.358442 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c\": container with ID starting with 97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c not found: ID does not exist" containerID="97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.358493 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c"} err="failed to get container status \"97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c\": rpc error: code = NotFound desc = could not find container \"97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c\": container with ID starting 
with 97eacfff2823399decb9a585fa44b181644b00fd0f9798c90a1148c0fae0ab7c not found: ID does not exist" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.358524 4703 scope.go:117] "RemoveContainer" containerID="efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30" Feb 02 13:18:12 crc kubenswrapper[4703]: E0202 13:18:12.358830 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30\": container with ID starting with efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30 not found: ID does not exist" containerID="efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.358862 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30"} err="failed to get container status \"efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30\": rpc error: code = NotFound desc = could not find container \"efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30\": container with ID starting with efdb1560ebc2f789851e8d029290be6a5b4ccd3806b5d6d1a9949445d2ecde30 not found: ID does not exist" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.358882 4703 scope.go:117] "RemoveContainer" containerID="ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0" Feb 02 13:18:12 crc kubenswrapper[4703]: E0202 13:18:12.359144 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0\": container with ID starting with ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0 not found: ID does not exist" containerID="ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0" Feb 02 13:18:12 crc kubenswrapper[4703]: I0202 13:18:12.359173 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0"} err="failed to get container status \"ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0\": rpc error: code = NotFound desc = could not find container \"ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0\": container with ID starting with ce49139557ff9088d3fac0dcba4815674bf058943ea8b43ed224a98e028943c0 not found: ID does not exist" Feb 02 13:18:13 crc kubenswrapper[4703]: I0202 13:18:13.943157 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="4c522108-2465-4905-9703-5dfd173bafdb" path="/var/lib/kubelet/pods/4c522108-2465-4905-9703-5dfd173bafdb/volumes" Feb 02 13:18:15 crc kubenswrapper[4703]: I0202 13:18:15.984771 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:18:15 crc kubenswrapper[4703]: I0202 13:18:15.984835 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: 
connection refused" Feb 02 13:18:45 crc kubenswrapper[4703]: I0202 13:18:45.984832 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:18:45 crc kubenswrapper[4703]: I0202 13:18:45.986452 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:18:46 crc kubenswrapper[4703]: I0202 13:18:46.512972 4703 generic.go:334] "Generic (PLEG): container finished" podID="237ae792-b6da-44bd-8618-7e84e3778277" containerID="b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa" exitCode=0 Feb 02 13:18:46 crc kubenswrapper[4703]: I0202 13:18:46.513035 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-tpk2h/must-gather-4258r" event={"ID":"237ae792-b6da-44bd-8618-7e84e3778277","Type":"ContainerDied","Data":"b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa"} Feb 02 13:18:46 crc kubenswrapper[4703]: I0202 13:18:46.513685 4703 scope.go:117] "RemoveContainer" containerID="b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa" Feb 02 13:18:46 crc kubenswrapper[4703]: I0202 13:18:46.704039 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tpk2h_must-gather-4258r_237ae792-b6da-44bd-8618-7e84e3778277/gather/0.log" Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.456241 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-tpk2h/must-gather-4258r"] Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.456897 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-tpk2h/must-gather-4258r" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="copy" containerID="cri-o://6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319" gracePeriod=2 Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.460670 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-tpk2h/must-gather-4258r"] Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.867962 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tpk2h_must-gather-4258r_237ae792-b6da-44bd-8618-7e84e3778277/copy/0.log" Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.869149 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tpk2h/must-gather-4258r" Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.972581 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/237ae792-b6da-44bd-8618-7e84e3778277-must-gather-output\") pod \"237ae792-b6da-44bd-8618-7e84e3778277\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.972691 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-5nvdv\" (UniqueName: \"kubernetes.io/projected/237ae792-b6da-44bd-8618-7e84e3778277-kube-api-access-5nvdv\") pod \"237ae792-b6da-44bd-8618-7e84e3778277\" (UID: \"237ae792-b6da-44bd-8618-7e84e3778277\") " Feb 02 13:18:53 crc kubenswrapper[4703]: I0202 13:18:53.977759 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/237ae792-b6da-44bd-8618-7e84e3778277-kube-api-access-5nvdv" (OuterVolumeSpecName: "kube-api-access-5nvdv") pod "237ae792-b6da-44bd-8618-7e84e3778277" (UID: "237ae792-b6da-44bd-8618-7e84e3778277"). InnerVolumeSpecName "kube-api-access-5nvdv". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.043372 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/237ae792-b6da-44bd-8618-7e84e3778277-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "237ae792-b6da-44bd-8618-7e84e3778277" (UID: "237ae792-b6da-44bd-8618-7e84e3778277"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.079939 4703 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/237ae792-b6da-44bd-8618-7e84e3778277-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.079976 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-5nvdv\" (UniqueName: \"kubernetes.io/projected/237ae792-b6da-44bd-8618-7e84e3778277-kube-api-access-5nvdv\") on node \"crc\" DevicePath \"\"" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.565895 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-tpk2h_must-gather-4258r_237ae792-b6da-44bd-8618-7e84e3778277/copy/0.log" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.566300 4703 generic.go:334] "Generic (PLEG): container finished" podID="237ae792-b6da-44bd-8618-7e84e3778277" containerID="6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319" exitCode=143 Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.566342 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-tpk2h/must-gather-4258r" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.566346 4703 scope.go:117] "RemoveContainer" containerID="6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.581257 4703 scope.go:117] "RemoveContainer" containerID="b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.611922 4703 scope.go:117] "RemoveContainer" containerID="6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319" Feb 02 13:18:54 crc kubenswrapper[4703]: E0202 13:18:54.612873 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319\": container with ID starting with 6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319 not found: ID does not exist" containerID="6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.612902 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319"} err="failed to get container status \"6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319\": rpc error: code = NotFound desc = could not find container \"6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319\": container with ID starting with 6b2bd5c1f129f399aa2757499b8cba50cd61796b77718a3705e45618eb6ce319 not found: ID does not exist" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.612932 4703 scope.go:117] "RemoveContainer" containerID="b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa" Feb 02 13:18:54 crc kubenswrapper[4703]: E0202 13:18:54.613263 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa\": container with ID starting with b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa not found: ID does not exist" containerID="b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa" Feb 02 13:18:54 crc kubenswrapper[4703]: I0202 13:18:54.613314 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa"} err="failed to get container status \"b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa\": rpc error: code = NotFound desc = could not find container \"b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa\": container with ID starting with b8ee87a3e24ef3c0f378f63b804d9bc767385c2063a22e424dde049fde755baa not found: ID does not exist" Feb 02 13:18:55 crc kubenswrapper[4703]: I0202 13:18:55.942501 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="237ae792-b6da-44bd-8618-7e84e3778277" path="/var/lib/kubelet/pods/237ae792-b6da-44bd-8618-7e84e3778277/volumes" Feb 02 13:19:15 crc kubenswrapper[4703]: I0202 13:19:15.985795 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:19:15 crc kubenswrapper[4703]: 
Feb 02 13:19:15 crc kubenswrapper[4703]: I0202 13:19:15.987510 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused"
Feb 02 13:19:15 crc kubenswrapper[4703]: I0202 13:19:15.987589 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs"
Feb 02 13:19:15 crc kubenswrapper[4703]: I0202 13:19:15.988333 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted"
Feb 02 13:19:15 crc kubenswrapper[4703]: I0202 13:19:15.988388 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" gracePeriod=600
Feb 02 13:19:16 crc kubenswrapper[4703]: E0202 13:19:16.121385 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:19:16 crc kubenswrapper[4703]: I0202 13:19:16.912642 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" exitCode=0
Feb 02 13:19:16 crc kubenswrapper[4703]: I0202 13:19:16.912689 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"}
Feb 02 13:19:16 crc kubenswrapper[4703]: I0202 13:19:16.912750 4703 scope.go:117] "RemoveContainer" containerID="2d3f73bdd3e90d2ab94249da72d0912b4673b933b3dc1372b3fe24922995e76a"
Feb 02 13:19:16 crc kubenswrapper[4703]: I0202 13:19:16.913264 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:19:16 crc kubenswrapper[4703]: E0202 13:19:16.913585 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:19:30 crc kubenswrapper[4703]: I0202 13:19:30.934308 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
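Taken together, the entries above are the complete liveness-failure path in order: patch_prober records the failed HTTP GET, the sync loop flips the probe result to unhealthy, kuberuntime_manager attaches the restart message, and the container is killed with the pod's 600-second termination grace period before CrashLoopBackOff throttles the restart. For orientation, a sketch of the kind of probe spec that would produce these checks, written against the k8s.io/api types; the host, port, and path are read off the log, while the period and failure threshold are assumptions, since the daemonset's real manifest is not part of this capture:

    package sketch

    import (
        corev1 "k8s.io/api/core/v1"
        "k8s.io/apimachinery/pkg/util/intstr"
    )

    // livenessProbe reconstructs the check the prober is running here: an
    // HTTP GET against 127.0.0.1:8798/health (values taken from the log).
    // PeriodSeconds and FailureThreshold are illustrative guesses. The
    // gracePeriod=600 on the kill comes from the pod's
    // terminationGracePeriodSeconds, not from the probe itself.
    func livenessProbe() *corev1.Probe {
        return &corev1.Probe{
            ProbeHandler: corev1.ProbeHandler{
                HTTPGet: &corev1.HTTPGetAction{
                    Host: "127.0.0.1",
                    Path: "/health",
                    Port: intstr.FromInt(8798),
                },
            },
            PeriodSeconds:    30, // assumed
            FailureThreshold: 3,  // assumed: restart after three straight failures
        }
    }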
Feb 02 13:19:30 crc kubenswrapper[4703]: E0202 13:19:30.935904 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.402368 4703 scope.go:117] "RemoveContainer" containerID="206a632afb2b7b2b6b746cef67a1e5d812e786097adf3ce7a1e755e50373396e"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.427096 4703 scope.go:117] "RemoveContainer" containerID="2234618673f4a5d42c1f0957e677fba837ff4a29f53b64e0d0f28ef861c78dd7"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.440893 4703 scope.go:117] "RemoveContainer" containerID="449f7488b9d6b7c7abd9737282ba124b1c541003454f2e8499d9d271b9aca602"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.510945 4703 scope.go:117] "RemoveContainer" containerID="f2d2e17bf7340fe1607f4b2de00210124c8bba280bcfaa908b41230bc7ef167d"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.530808 4703 scope.go:117] "RemoveContainer" containerID="5253613ef8830ff6e4c45f7fc6033227ae263832b3cc56b5b7de0cef105fb946"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.545091 4703 scope.go:117] "RemoveContainer" containerID="19b0919e9bfb8a13878c1b958f9cf3fe5197f70c3f96fe8350c9dc228898f5ee"
Feb 02 13:19:39 crc kubenswrapper[4703]: I0202 13:19:39.566978 4703 scope.go:117] "RemoveContainer" containerID="2a6d5fd32f54080c9373342dba8867eb91db2ed2acb08197eebfeeac7dae899d"
Feb 02 13:19:42 crc kubenswrapper[4703]: I0202 13:19:42.934301 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:19:42 crc kubenswrapper[4703]: E0202 13:19:42.935104 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:19:53 crc kubenswrapper[4703]: I0202 13:19:53.936593 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:19:53 crc kubenswrapper[4703]: E0202 13:19:53.937323 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:20:06 crc kubenswrapper[4703]: I0202 13:20:06.934077 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
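From here the pattern repeats in pairs: the sync loop retries the pod (the "RemoveContainer" of the previous attempt's container) every 10 to 15 seconds, and the pod worker rejects each retry because the restart backoff has not expired. The backoff quoted in the error is already saturated at 5m0s; the daemon is in fact only restarted about five minutes after the 13:19:16 kill (at 13:24:29, further below). A sketch of that capped doubling, using the initial 10s delay and 5m cap that kubelet has long shipped as defaults (assumed here, since the node's configuration is not in the log):

    package main

    import (
        "fmt"
        "time"
    )

    // crashLoopDelay illustrates the throttling behind the repeating
    // "back-off 5m0s" errors: the restart delay starts at 10s and doubles
    // per consecutive crash until it saturates at 5m. This is a sketch of
    // the behavior, not kubelet's actual implementation.
    func crashLoopDelay(consecutiveCrashes int) time.Duration {
        const (
            initialDelay = 10 * time.Second
            maxDelay     = 5 * time.Minute
        )
        d := initialDelay
        for i := 1; i < consecutiveCrashes; i++ {
            d *= 2
            if d >= maxDelay {
                return maxDelay
            }
        }
        return d
    }

    func main() {
        for n := 1; n <= 7; n++ {
            fmt.Printf("crash %2d -> wait %v\n", n, crashLoopDelay(n))
        }
        // By the sixth crash the delay is pinned at 5m0s; every ~10-15s
        // sync-loop retry in the log is rejected with the CrashLoopBackOff
        // error until that timer expires.
    }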
Feb 02 13:20:06 crc kubenswrapper[4703]: E0202 13:20:06.934836 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:20:17 crc kubenswrapper[4703]: I0202 13:20:17.934641 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:20:17 crc kubenswrapper[4703]: E0202 13:20:17.935513 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:20:30 crc kubenswrapper[4703]: I0202 13:20:30.934170 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:20:30 crc kubenswrapper[4703]: E0202 13:20:30.935214 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:20:39 crc kubenswrapper[4703]: I0202 13:20:39.655255 4703 scope.go:117] "RemoveContainer" containerID="bcc6c65d82660bda38b69146c5e9a2d927b08144003f311a114f87e61b87f83f"
Feb 02 13:20:39 crc kubenswrapper[4703]: I0202 13:20:39.688073 4703 scope.go:117] "RemoveContainer" containerID="577887e8178672ae76d1175143f549e287d8a12ad99c51f5ad101e0260d6f4dd"
Feb 02 13:20:39 crc kubenswrapper[4703]: I0202 13:20:39.706203 4703 scope.go:117] "RemoveContainer" containerID="fd3a33e2fb2a5300e2e30b1fc9393d041b9e1e4ab7b75f62b9abb1b5b8c0553c"
Feb 02 13:20:43 crc kubenswrapper[4703]: I0202 13:20:43.936800 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:20:43 crc kubenswrapper[4703]: E0202 13:20:43.937593 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:20:55 crc kubenswrapper[4703]: I0202 13:20:55.933481 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744"
Feb 02 13:20:55 crc kubenswrapper[4703]: E0202 13:20:55.934194 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c"
Feb 02 13:21:07 crc kubenswrapper[4703]: I0202 13:21:07.934222 4703 scope.go:117] "RemoveContainer"
containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:21:07 crc kubenswrapper[4703]: E0202 13:21:07.935337 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:21:18 crc kubenswrapper[4703]: I0202 13:21:18.934605 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:21:18 crc kubenswrapper[4703]: E0202 13:21:18.935478 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.167582 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-must-gather-m8qdz/must-gather-9sjs4"] Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168197 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="gather" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168209 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="gather" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168220 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="copy" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168226 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="copy" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168232 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="registry-server" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168241 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="registry-server" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168252 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="extract-content" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168257 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="extract-content" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168264 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="extract-utilities" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168272 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="extract-utilities" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168297 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="4c522108-2465-4905-9703-5dfd173bafdb" 
containerName="registry-server" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168303 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="registry-server" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168311 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="extract-content" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168316 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="extract-content" Feb 02 13:21:23 crc kubenswrapper[4703]: E0202 13:21:23.168332 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="extract-utilities" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168338 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="extract-utilities" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168429 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="gather" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168440 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="4c522108-2465-4905-9703-5dfd173bafdb" containerName="registry-server" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168448 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="c0807450-8cb3-4b02-9435-33f9db39497d" containerName="registry-server" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168458 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="237ae792-b6da-44bd-8618-7e84e3778277" containerName="copy" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.168976 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.172446 4703 reflector.go:368] Caches populated for *v1.Secret from object-"openshift-must-gather-m8qdz"/"default-dockercfg-kxsgp" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.172831 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m8qdz"/"kube-root-ca.crt" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.173226 4703 reflector.go:368] Caches populated for *v1.ConfigMap from object-"openshift-must-gather-m8qdz"/"openshift-service-ca.crt" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.195118 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m8qdz/must-gather-9sjs4"] Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.282397 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-4bgbs\" (UniqueName: \"kubernetes.io/projected/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-kube-api-access-4bgbs\") pod \"must-gather-9sjs4\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.282563 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-must-gather-output\") pod \"must-gather-9sjs4\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.384254 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-must-gather-output\") pod \"must-gather-9sjs4\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.384435 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-4bgbs\" (UniqueName: \"kubernetes.io/projected/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-kube-api-access-4bgbs\") pod \"must-gather-9sjs4\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.385216 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-must-gather-output\") pod \"must-gather-9sjs4\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.402904 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-4bgbs\" (UniqueName: \"kubernetes.io/projected/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-kube-api-access-4bgbs\") pod \"must-gather-9sjs4\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.483810 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:21:23 crc kubenswrapper[4703]: I0202 13:21:23.694664 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-must-gather-m8qdz/must-gather-9sjs4"] Feb 02 13:21:23 crc kubenswrapper[4703]: W0202 13:21:23.709467 4703 manager.go:1169] Failed to process watch event {EventType:0 Name:/kubepods.slice/kubepods-besteffort.slice/kubepods-besteffort-poda71f4f29_8b10_4cab_b758_8986ae9e1ce1.slice/crio-7700c5845db0b2b749fbec897ce1e023d5b0f55d391788a8c8bb90dc48c7790f WatchSource:0}: Error finding container 7700c5845db0b2b749fbec897ce1e023d5b0f55d391788a8c8bb90dc48c7790f: Status 404 returned error can't find the container with id 7700c5845db0b2b749fbec897ce1e023d5b0f55d391788a8c8bb90dc48c7790f Feb 02 13:21:24 crc kubenswrapper[4703]: I0202 13:21:24.672714 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" event={"ID":"a71f4f29-8b10-4cab-b758-8986ae9e1ce1","Type":"ContainerStarted","Data":"787a751a4900593cbc97663e475025c8957cef4742983d0ad7fecda71208d236"} Feb 02 13:21:24 crc kubenswrapper[4703]: I0202 13:21:24.673497 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" event={"ID":"a71f4f29-8b10-4cab-b758-8986ae9e1ce1","Type":"ContainerStarted","Data":"2ddab3fa6cb86b44bd07018cac519857264fbf6ecb56fec223704377a9437565"} Feb 02 13:21:24 crc kubenswrapper[4703]: I0202 13:21:24.673545 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" event={"ID":"a71f4f29-8b10-4cab-b758-8986ae9e1ce1","Type":"ContainerStarted","Data":"7700c5845db0b2b749fbec897ce1e023d5b0f55d391788a8c8bb90dc48c7790f"} Feb 02 13:21:31 crc kubenswrapper[4703]: I0202 13:21:31.934728 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:21:31 crc kubenswrapper[4703]: E0202 13:21:31.935641 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:21:46 crc kubenswrapper[4703]: I0202 13:21:46.933992 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:21:46 crc kubenswrapper[4703]: E0202 13:21:46.935142 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:22:00 crc kubenswrapper[4703]: I0202 13:22:00.933930 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:22:00 crc kubenswrapper[4703]: E0202 13:22:00.934796 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon 
pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:22:08 crc kubenswrapper[4703]: I0202 13:22:08.618712 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_control-plane-machine-set-operator-78cbb6b69f-z7cwj_71a6d661-54a6-430a-902b-656503b8d12a/control-plane-machine-set-operator/0.log" Feb 02 13:22:08 crc kubenswrapper[4703]: I0202 13:22:08.728209 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bsrg2_335fb805-5368-4ab2-bdb6-d642aeb11902/kube-rbac-proxy/0.log" Feb 02 13:22:08 crc kubenswrapper[4703]: I0202 13:22:08.797391 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-machine-api_machine-api-operator-5694c8668f-bsrg2_335fb805-5368-4ab2-bdb6-d642aeb11902/machine-api-operator/0.log" Feb 02 13:22:15 crc kubenswrapper[4703]: I0202 13:22:15.934180 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:22:15 crc kubenswrapper[4703]: E0202 13:22:15.934923 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:22:29 crc kubenswrapper[4703]: I0202 13:22:29.934625 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:22:29 crc kubenswrapper[4703]: E0202 13:22:29.935483 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.542850 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-wxlfd_5fc3b818-d481-4eb8-b065-b03384d4f164/kube-rbac-proxy/0.log" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.558703 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_controller-6968d8fdc4-wxlfd_5fc3b818-d481-4eb8-b065-b03384d4f164/controller/0.log" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.731384 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.904597 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.910376 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.927235 4703 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log" Feb 02 13:22:34 crc kubenswrapper[4703]: I0202 13:22:34.930709 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.095026 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.105568 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.114450 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.116943 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.256982 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-reloader/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.279596 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-metrics/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.285718 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/cp-frr-files/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.292864 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/controller/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.453443 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/frr-metrics/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.469733 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/kube-rbac-proxy-frr/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.494625 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/kube-rbac-proxy/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.640476 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/reloader/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.723101 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_frr-k8s-webhook-server-7df86c4f6c-dfdkw_1ff8e6a8-2795-4cb8-9550-75b3129ef6b4/frr-k8s-webhook-server/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.874556 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-controller-manager-54d9d5b5d6-wdjdr_8d876cf0-0b55-4941-9b60-7258381875ec/manager/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.897137 4703 log.go:25] "Finished parsing log file" 
path="/var/log/pods/metallb-system_frr-k8s-bqvnt_3ead451e-1f12-417c-9bcf-59722dedbb65/frr/0.log" Feb 02 13:22:35 crc kubenswrapper[4703]: I0202 13:22:35.995492 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_metallb-operator-webhook-server-65699778c9-782nc_9c3d0481-1b4c-4d06-8605-bb6079b162b8/webhook-server/0.log" Feb 02 13:22:36 crc kubenswrapper[4703]: I0202 13:22:36.063796 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zgzbl_622b2024-9b5b-4964-aed7-52b15352cd1d/kube-rbac-proxy/0.log" Feb 02 13:22:36 crc kubenswrapper[4703]: I0202 13:22:36.170508 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/metallb-system_speaker-zgzbl_622b2024-9b5b-4964-aed7-52b15352cd1d/speaker/0.log" Feb 02 13:22:44 crc kubenswrapper[4703]: I0202 13:22:44.934158 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:22:44 crc kubenswrapper[4703]: E0202 13:22:44.934889 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:22:58 crc kubenswrapper[4703]: I0202 13:22:58.934235 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:22:58 crc kubenswrapper[4703]: E0202 13:22:58.935095 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.198938 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/util/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.378268 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/util/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.379180 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/pull/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.385695 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/pull/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.556700 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/extract/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.559448 4703 log.go:25] "Finished parsing log file" 
path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/util/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.565187 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_270996307cd21d144be796860235064b5127c2fcf62ccccd6689c259dcfbz2c_d06ca331-ef2a-42ae-a81d-286ae08693e5/pull/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.723703 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/extract-utilities/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.898042 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/extract-utilities/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.901299 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/extract-content/0.log" Feb 02 13:23:00 crc kubenswrapper[4703]: I0202 13:23:00.930126 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/extract-content/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.129333 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/extract-content/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.138754 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/extract-utilities/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.220191 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_certified-operators-g77z4_b56a5aee-bc7b-4d7d-83a0-50292348f157/registry-server/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.304450 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-utilities/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.533067 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-utilities/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.538853 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-content/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.563036 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-content/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.694970 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-content/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.699025 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/extract-utilities/0.log" Feb 02 13:23:01 crc kubenswrapper[4703]: I0202 13:23:01.885286 4703 
log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_marketplace-operator-79b997595-tbv5t_71ff2212-c3fe-45e2-87ef-a4d4e19ce91d/marketplace-operator/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.020119 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-utilities/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.185415 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_community-operators-m8xxx_1a77f425-fdf2-44a5-8fac-d41dc0307d14/registry-server/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.226370 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-content/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.239000 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-content/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.274371 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-utilities/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.425351 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-utilities/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.457779 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/extract-content/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.537625 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-marketplace-kg6h4_3cd4a4fe-0b9f-4f64-834a-df0a1f6a3bb3/registry-server/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.634139 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-utilities/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.899484 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-utilities/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.924388 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-content/0.log" Feb 02 13:23:02 crc kubenswrapper[4703]: I0202 13:23:02.933557 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-content/0.log" Feb 02 13:23:03 crc kubenswrapper[4703]: I0202 13:23:03.102039 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-content/0.log" Feb 02 13:23:03 crc kubenswrapper[4703]: I0202 13:23:03.109465 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/extract-utilities/0.log" Feb 02 13:23:03 crc kubenswrapper[4703]: I0202 13:23:03.511894 4703 log.go:25] "Finished 
parsing log file" path="/var/log/pods/openshift-marketplace_redhat-operators-grsx4_4d7077d1-f842-4918-83cd-cf46ec77a8b7/registry-server/0.log" Feb 02 13:23:12 crc kubenswrapper[4703]: I0202 13:23:12.933309 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:23:12 crc kubenswrapper[4703]: E0202 13:23:12.933980 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:23:23 crc kubenswrapper[4703]: I0202 13:23:23.939609 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:23:23 crc kubenswrapper[4703]: E0202 13:23:23.942209 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:23:36 crc kubenswrapper[4703]: I0202 13:23:36.933625 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:23:36 crc kubenswrapper[4703]: E0202 13:23:36.935304 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:23:49 crc kubenswrapper[4703]: I0202 13:23:49.934611 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:23:49 crc kubenswrapper[4703]: E0202 13:23:49.935245 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:24:00 crc kubenswrapper[4703]: I0202 13:24:00.933573 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:24:00 crc kubenswrapper[4703]: E0202 13:24:00.935541 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:24:13 crc kubenswrapper[4703]: I0202 
13:24:13.938110 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:24:13 crc kubenswrapper[4703]: E0202 13:24:13.939120 4703 pod_workers.go:1301] "Error syncing pod, skipping" err="failed to \"StartContainer\" for \"machine-config-daemon\" with CrashLoopBackOff: \"back-off 5m0s restarting failed container=machine-config-daemon pod=machine-config-daemon-2vnzs_openshift-machine-config-operator(c18a759f-5f28-4f90-866f-8f90476ba69c)\"" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" Feb 02 13:24:15 crc kubenswrapper[4703]: I0202 13:24:15.831325 4703 generic.go:334] "Generic (PLEG): container finished" podID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerID="2ddab3fa6cb86b44bd07018cac519857264fbf6ecb56fec223704377a9437565" exitCode=0 Feb 02 13:24:15 crc kubenswrapper[4703]: I0202 13:24:15.831379 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" event={"ID":"a71f4f29-8b10-4cab-b758-8986ae9e1ce1","Type":"ContainerDied","Data":"2ddab3fa6cb86b44bd07018cac519857264fbf6ecb56fec223704377a9437565"} Feb 02 13:24:15 crc kubenswrapper[4703]: I0202 13:24:15.832200 4703 scope.go:117] "RemoveContainer" containerID="2ddab3fa6cb86b44bd07018cac519857264fbf6ecb56fec223704377a9437565" Feb 02 13:24:16 crc kubenswrapper[4703]: I0202 13:24:16.615798 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m8qdz_must-gather-9sjs4_a71f4f29-8b10-4cab-b758-8986ae9e1ce1/gather/0.log" Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.602662 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-must-gather-m8qdz/must-gather-9sjs4"] Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.603602 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="copy" containerID="cri-o://787a751a4900593cbc97663e475025c8957cef4742983d0ad7fecda71208d236" gracePeriod=2 Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.609027 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-must-gather-m8qdz/must-gather-9sjs4"] Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.890013 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m8qdz_must-gather-9sjs4_a71f4f29-8b10-4cab-b758-8986ae9e1ce1/copy/0.log" Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.890648 4703 generic.go:334] "Generic (PLEG): container finished" podID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerID="787a751a4900593cbc97663e475025c8957cef4742983d0ad7fecda71208d236" exitCode=143 Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.939921 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m8qdz_must-gather-9sjs4_a71f4f29-8b10-4cab-b758-8986ae9e1ce1/copy/0.log" Feb 02 13:24:25 crc kubenswrapper[4703]: I0202 13:24:25.944130 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.126546 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-must-gather-output\") pod \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.126603 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-4bgbs\" (UniqueName: \"kubernetes.io/projected/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-kube-api-access-4bgbs\") pod \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\" (UID: \"a71f4f29-8b10-4cab-b758-8986ae9e1ce1\") " Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.133816 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-kube-api-access-4bgbs" (OuterVolumeSpecName: "kube-api-access-4bgbs") pod "a71f4f29-8b10-4cab-b758-8986ae9e1ce1" (UID: "a71f4f29-8b10-4cab-b758-8986ae9e1ce1"). InnerVolumeSpecName "kube-api-access-4bgbs". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.191842 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-must-gather-output" (OuterVolumeSpecName: "must-gather-output") pod "a71f4f29-8b10-4cab-b758-8986ae9e1ce1" (UID: "a71f4f29-8b10-4cab-b758-8986ae9e1ce1"). InnerVolumeSpecName "must-gather-output". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.228209 4703 reconciler_common.go:293] "Volume detached for volume \"must-gather-output\" (UniqueName: \"kubernetes.io/empty-dir/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-must-gather-output\") on node \"crc\" DevicePath \"\"" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.228251 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-4bgbs\" (UniqueName: \"kubernetes.io/projected/a71f4f29-8b10-4cab-b758-8986ae9e1ce1-kube-api-access-4bgbs\") on node \"crc\" DevicePath \"\"" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.897507 4703 log.go:25] "Finished parsing log file" path="/var/log/pods/openshift-must-gather-m8qdz_must-gather-9sjs4_a71f4f29-8b10-4cab-b758-8986ae9e1ce1/copy/0.log" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.899394 4703 scope.go:117] "RemoveContainer" containerID="787a751a4900593cbc97663e475025c8957cef4742983d0ad7fecda71208d236" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.899576 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-must-gather-m8qdz/must-gather-9sjs4" Feb 02 13:24:26 crc kubenswrapper[4703]: I0202 13:24:26.915436 4703 scope.go:117] "RemoveContainer" containerID="2ddab3fa6cb86b44bd07018cac519857264fbf6ecb56fec223704377a9437565" Feb 02 13:24:27 crc kubenswrapper[4703]: I0202 13:24:27.941633 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" path="/var/lib/kubelet/pods/a71f4f29-8b10-4cab-b758-8986ae9e1ce1/volumes" Feb 02 13:24:28 crc kubenswrapper[4703]: I0202 13:24:28.934373 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:24:29 crc kubenswrapper[4703]: I0202 13:24:29.919101 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"53630a38cd844d9d7f85bf2b2c2ac452874354319828d8be0d0b6978e4b173e0"} Feb 02 13:26:45 crc kubenswrapper[4703]: I0202 13:26:45.985067 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:26:45 crc kubenswrapper[4703]: I0202 13:26:45.985599 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:27:02 crc kubenswrapper[4703]: I0202 13:27:02.989127 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/redhat-marketplace-2sl2m"] Feb 02 13:27:02 crc kubenswrapper[4703]: E0202 13:27:02.989961 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="gather" Feb 02 13:27:02 crc kubenswrapper[4703]: I0202 13:27:02.989976 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="gather" Feb 02 13:27:02 crc kubenswrapper[4703]: E0202 13:27:02.989999 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="copy" Feb 02 13:27:02 crc kubenswrapper[4703]: I0202 13:27:02.990008 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="copy" Feb 02 13:27:02 crc kubenswrapper[4703]: I0202 13:27:02.990132 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="gather" Feb 02 13:27:02 crc kubenswrapper[4703]: I0202 13:27:02.990150 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="a71f4f29-8b10-4cab-b758-8986ae9e1ce1" containerName="copy" Feb 02 13:27:02 crc kubenswrapper[4703]: I0202 13:27:02.991106 4703 util.go:30] "No sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.006677 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sl2m"] Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.179542 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kv48z\" (UniqueName: \"kubernetes.io/projected/6331afbf-68c9-4b50-94bb-0e5c5902644d-kube-api-access-kv48z\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.179914 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-utilities\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.179945 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-catalog-content\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.280956 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kv48z\" (UniqueName: \"kubernetes.io/projected/6331afbf-68c9-4b50-94bb-0e5c5902644d-kube-api-access-kv48z\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.281007 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-utilities\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.281036 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-catalog-content\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.281604 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-utilities\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.281546 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-catalog-content\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.302413 4703 operation_generator.go:637] "MountVolume.SetUp 
succeeded for volume \"kube-api-access-kv48z\" (UniqueName: \"kubernetes.io/projected/6331afbf-68c9-4b50-94bb-0e5c5902644d-kube-api-access-kv48z\") pod \"redhat-marketplace-2sl2m\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.312849 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.790706 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sl2m"] Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.991542 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerStarted","Data":"f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3"} Feb 02 13:27:03 crc kubenswrapper[4703]: I0202 13:27:03.992667 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerStarted","Data":"4376487817c2e6c29bc51671e1b251579f5cc8a976ed9af2c4ed7ff0f8f5c762"} Feb 02 13:27:05 crc kubenswrapper[4703]: I0202 13:27:05.000389 4703 generic.go:334] "Generic (PLEG): container finished" podID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerID="f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3" exitCode=0 Feb 02 13:27:05 crc kubenswrapper[4703]: I0202 13:27:05.000474 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerDied","Data":"f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3"} Feb 02 13:27:05 crc kubenswrapper[4703]: I0202 13:27:05.003160 4703 provider.go:102] Refreshing cache for provider: *credentialprovider.defaultDockerConfigProvider Feb 02 13:27:06 crc kubenswrapper[4703]: I0202 13:27:06.009310 4703 generic.go:334] "Generic (PLEG): container finished" podID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerID="88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242" exitCode=0 Feb 02 13:27:06 crc kubenswrapper[4703]: I0202 13:27:06.009400 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerDied","Data":"88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242"} Feb 02 13:27:07 crc kubenswrapper[4703]: I0202 13:27:07.016207 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerStarted","Data":"62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719"} Feb 02 13:27:13 crc kubenswrapper[4703]: I0202 13:27:13.314417 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:13 crc kubenswrapper[4703]: I0202 13:27:13.314973 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:13 crc kubenswrapper[4703]: I0202 13:27:13.382562 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:13 crc kubenswrapper[4703]: I0202 
Feb 02 13:27:13 crc kubenswrapper[4703]: I0202 13:27:13.400180 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/redhat-marketplace-2sl2m" podStartSLOduration=9.982900375 podStartE2EDuration="11.400161478s" podCreationTimestamp="2026-02-02 13:27:02 +0000 UTC" firstStartedPulling="2026-02-02 13:27:05.002796315 +0000 UTC m=+2152.018003859" lastFinishedPulling="2026-02-02 13:27:06.420057428 +0000 UTC m=+2153.435264962" observedRunningTime="2026-02-02 13:27:07.033757375 +0000 UTC m=+2154.048964909" watchObservedRunningTime="2026-02-02 13:27:13.400161478 +0000 UTC m=+2160.415369012"
Feb 02 13:27:14 crc kubenswrapper[4703]: I0202 13:27:14.087490 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/redhat-marketplace-2sl2m"
Feb 02 13:27:14 crc kubenswrapper[4703]: I0202 13:27:14.135570 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sl2m"]
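The latency entry above reports two figures: podStartE2EDuration is wall time from pod creation to the watch observing it running, while podStartSLOduration excludes image-pull time. The monotonic m= offsets in the entry confirm the arithmetic exactly; a small sketch of the relationship (not kubelet code):

package main

import "fmt"

func main() {
	// Figures from the pod_startup_latency_tracker entry above, in seconds.
	e2e := 11.400161478                     // podStartE2EDuration
	pull := 2153.435264962 - 2152.018003859 // lastFinishedPulling - firstStartedPulling (m= offsets)
	// The SLO figure discounts time spent pulling images.
	fmt.Printf("podStartSLOduration ~ %.9f\n", e2e-pull) // prints 9.982900375
}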
Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.547698 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-catalog-content\") pod \"6331afbf-68c9-4b50-94bb-0e5c5902644d\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.547762 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-utilities\") pod \"6331afbf-68c9-4b50-94bb-0e5c5902644d\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.547836 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kv48z\" (UniqueName: \"kubernetes.io/projected/6331afbf-68c9-4b50-94bb-0e5c5902644d-kube-api-access-kv48z\") pod \"6331afbf-68c9-4b50-94bb-0e5c5902644d\" (UID: \"6331afbf-68c9-4b50-94bb-0e5c5902644d\") " Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.548724 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-utilities" (OuterVolumeSpecName: "utilities") pod "6331afbf-68c9-4b50-94bb-0e5c5902644d" (UID: "6331afbf-68c9-4b50-94bb-0e5c5902644d"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.563852 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/6331afbf-68c9-4b50-94bb-0e5c5902644d-kube-api-access-kv48z" (OuterVolumeSpecName: "kube-api-access-kv48z") pod "6331afbf-68c9-4b50-94bb-0e5c5902644d" (UID: "6331afbf-68c9-4b50-94bb-0e5c5902644d"). InnerVolumeSpecName "kube-api-access-kv48z". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.570861 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "6331afbf-68c9-4b50-94bb-0e5c5902644d" (UID: "6331afbf-68c9-4b50-94bb-0e5c5902644d"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.649542 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kv48z\" (UniqueName: \"kubernetes.io/projected/6331afbf-68c9-4b50-94bb-0e5c5902644d-kube-api-access-kv48z\") on node \"crc\" DevicePath \"\"" Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.649600 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:27:16 crc kubenswrapper[4703]: I0202 13:27:16.649616 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/6331afbf-68c9-4b50-94bb-0e5c5902644d-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.071346 4703 generic.go:334] "Generic (PLEG): container finished" podID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerID="62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719" exitCode=0 Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.071388 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerDied","Data":"62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719"} Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.071418 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/redhat-marketplace-2sl2m" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.071446 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/redhat-marketplace-2sl2m" event={"ID":"6331afbf-68c9-4b50-94bb-0e5c5902644d","Type":"ContainerDied","Data":"4376487817c2e6c29bc51671e1b251579f5cc8a976ed9af2c4ed7ff0f8f5c762"} Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.071467 4703 scope.go:117] "RemoveContainer" containerID="62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.099997 4703 scope.go:117] "RemoveContainer" containerID="88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.111571 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sl2m"] Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.117997 4703 scope.go:117] "RemoveContainer" containerID="f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.118644 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/redhat-marketplace-2sl2m"] Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.134010 4703 scope.go:117] "RemoveContainer" containerID="62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719" Feb 02 13:27:17 crc kubenswrapper[4703]: E0202 13:27:17.136441 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719\": container with ID starting with 62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719 not found: ID does not exist" containerID="62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.136482 4703 
pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719"} err="failed to get container status \"62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719\": rpc error: code = NotFound desc = could not find container \"62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719\": container with ID starting with 62d528c5be0d0f9cb9947e4d9629d6012c60999bcb893c0d996bd5620c338719 not found: ID does not exist" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.136506 4703 scope.go:117] "RemoveContainer" containerID="88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242" Feb 02 13:27:17 crc kubenswrapper[4703]: E0202 13:27:17.136838 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242\": container with ID starting with 88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242 not found: ID does not exist" containerID="88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.136912 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242"} err="failed to get container status \"88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242\": rpc error: code = NotFound desc = could not find container \"88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242\": container with ID starting with 88975487fbb9240bbcf316a6f3fd75a03bd89dfabc2d978e9ee96ef0b131f242 not found: ID does not exist" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.136970 4703 scope.go:117] "RemoveContainer" containerID="f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3" Feb 02 13:27:17 crc kubenswrapper[4703]: E0202 13:27:17.137520 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3\": container with ID starting with f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3 not found: ID does not exist" containerID="f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.137573 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3"} err="failed to get container status \"f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3\": rpc error: code = NotFound desc = could not find container \"f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3\": container with ID starting with f3d610c727ee2fdfe696640746f50b68560eb409eb13a90088f1a0d3db8e0fe3 not found: ID does not exist" Feb 02 13:27:17 crc kubenswrapper[4703]: I0202 13:27:17.940470 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" path="/var/lib/kubelet/pods/6331afbf-68c9-4b50-94bb-0e5c5902644d/volumes" Feb 02 13:27:45 crc kubenswrapper[4703]: I0202 13:27:45.984582 4703 patch_prober.go:28] interesting pod/machine-config-daemon-2vnzs container/machine-config-daemon namespace/openshift-machine-config-operator: Liveness probe status=failure output="Get \"http://127.0.0.1:8798/health\": dial tcp 
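The RemoveContainer/ContainerStatus exchanges above show the kubelet tolerating NotFound during cleanup: once CRI-O has already deleted a container, the status lookup fails with gRPC NotFound, which is logged and then ignored rather than retried. A sketch of that tolerant-delete pattern; statusFn is a hypothetical stand-in for a CRI ContainerStatus call:

package main

import (
	"fmt"

	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// removeContainer treats a gRPC NotFound from the runtime as "already gone":
// it logs the error and reports success, mirroring the entries above.
func removeContainer(id string, statusFn func(id string) error) error {
	if err := statusFn(id); err != nil {
		if status.Code(err) == codes.NotFound {
			fmt.Printf("container %q already removed: %v\n", id, err)
			return nil
		}
		return fmt.Errorf("get container status %q: %w", id, err)
	}
	return nil
}

func main() {
	_ = removeContainer("62d528c5", func(string) error {
		return status.Error(codes.NotFound, "ID does not exist")
	})
}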
127.0.0.1:8798: connect: connection refused" start-of-body= Feb 02 13:27:45 crc kubenswrapper[4703]: I0202 13:27:45.986181 4703 prober.go:107] "Probe failed" probeType="Liveness" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" probeResult="failure" output="Get \"http://127.0.0.1:8798/health\": dial tcp 127.0.0.1:8798: connect: connection refused" Feb 02 13:27:45 crc kubenswrapper[4703]: I0202 13:27:45.986374 4703 kubelet.go:2542] "SyncLoop (probe)" probe="liveness" status="unhealthy" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" Feb 02 13:27:45 crc kubenswrapper[4703]: I0202 13:27:45.987093 4703 kuberuntime_manager.go:1027] "Message for Container of pod" containerName="machine-config-daemon" containerStatusID={"Type":"cri-o","ID":"53630a38cd844d9d7f85bf2b2c2ac452874354319828d8be0d0b6978e4b173e0"} pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" containerMessage="Container machine-config-daemon failed liveness probe, will be restarted" Feb 02 13:27:45 crc kubenswrapper[4703]: I0202 13:27:45.987243 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" podUID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerName="machine-config-daemon" containerID="cri-o://53630a38cd844d9d7f85bf2b2c2ac452874354319828d8be0d0b6978e4b173e0" gracePeriod=600 Feb 02 13:27:46 crc kubenswrapper[4703]: I0202 13:27:46.310310 4703 generic.go:334] "Generic (PLEG): container finished" podID="c18a759f-5f28-4f90-866f-8f90476ba69c" containerID="53630a38cd844d9d7f85bf2b2c2ac452874354319828d8be0d0b6978e4b173e0" exitCode=0 Feb 02 13:27:46 crc kubenswrapper[4703]: I0202 13:27:46.310311 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerDied","Data":"53630a38cd844d9d7f85bf2b2c2ac452874354319828d8be0d0b6978e4b173e0"} Feb 02 13:27:46 crc kubenswrapper[4703]: I0202 13:27:46.310728 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-machine-config-operator/machine-config-daemon-2vnzs" event={"ID":"c18a759f-5f28-4f90-866f-8f90476ba69c","Type":"ContainerStarted","Data":"36c44e19b3aec42f22770d6e1461b5ba78f758d41d0cc96dbda5b7fdface88db"} Feb 02 13:27:46 crc kubenswrapper[4703]: I0202 13:27:46.310754 4703 scope.go:117] "RemoveContainer" containerID="6abb43bd894b319949f167d18fed18cee0f9d46e829da34fcc61ca8e7ff99744" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.738221 4703 kubelet.go:2421] "SyncLoop ADD" source="api" pods=["openshift-marketplace/community-operators-l7xlr"] Feb 02 13:27:57 crc kubenswrapper[4703]: E0202 13:27:57.738675 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerName="extract-content" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.738688 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerName="extract-content" Feb 02 13:27:57 crc kubenswrapper[4703]: E0202 13:27:57.738704 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerName="registry-server" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.738712 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" 
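The machine-config-daemon entries above are a liveness-driven restart: the HTTP probe to 127.0.0.1:8798/health is refused, the kubelet marks the container unhealthy, kills it with the pod's grace period, and starts a replacement. A sketch of a probe with that shape using the k8s.io/api types; the period and threshold are illustrative, not taken from the daemonset manifest:

package main

import (
	"fmt"

	corev1 "k8s.io/api/core/v1"
	"k8s.io/apimachinery/pkg/util/intstr"
)

func main() {
	// Probe matching the failure output above: GET http://127.0.0.1:8798/health.
	liveness := &corev1.Probe{
		ProbeHandler: corev1.ProbeHandler{
			HTTPGet: &corev1.HTTPGetAction{
				Host: "127.0.0.1",
				Path: "/health",
				Port: intstr.FromInt(8798),
			},
		},
		PeriodSeconds:    30, // the failures above arrive ~30s apart; value assumed
		FailureThreshold: 3,  // assumed
	}
	// Note: gracePeriod=600 in the kill entry comes from the pod's
	// terminationGracePeriodSeconds, not from the probe itself.
	fmt.Println("probe:", liveness.HTTPGet.Path, liveness.HTTPGet.Port.IntValue())
}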
containerName="registry-server" Feb 02 13:27:57 crc kubenswrapper[4703]: E0202 13:27:57.738724 4703 cpu_manager.go:410] "RemoveStaleState: removing container" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerName="extract-utilities" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.738731 4703 state_mem.go:107] "Deleted CPUSet assignment" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerName="extract-utilities" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.738831 4703 memory_manager.go:354] "RemoveStaleState removing state" podUID="6331afbf-68c9-4b50-94bb-0e5c5902644d" containerName="registry-server" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.739555 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.751185 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l7xlr"] Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.765894 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"kube-api-access-kqpc7\" (UniqueName: \"kubernetes.io/projected/5dde4d09-91cf-4235-91ed-d6593762fb77-kube-api-access-kqpc7\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.765938 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-utilities\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.766003 4703 reconciler_common.go:245] "operationExecutor.VerifyControllerAttachedVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-catalog-content\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.867220 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-catalog-content\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.867585 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"kube-api-access-kqpc7\" (UniqueName: \"kubernetes.io/projected/5dde4d09-91cf-4235-91ed-d6593762fb77-kube-api-access-kqpc7\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.867669 4703 reconciler_common.go:218] "operationExecutor.MountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-utilities\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.867719 4703 operation_generator.go:637] 
"MountVolume.SetUp succeeded for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-catalog-content\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.867927 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-utilities\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:57 crc kubenswrapper[4703]: I0202 13:27:57.888134 4703 operation_generator.go:637] "MountVolume.SetUp succeeded for volume \"kube-api-access-kqpc7\" (UniqueName: \"kubernetes.io/projected/5dde4d09-91cf-4235-91ed-d6593762fb77-kube-api-access-kqpc7\") pod \"community-operators-l7xlr\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:58 crc kubenswrapper[4703]: I0202 13:27:58.054806 4703 util.go:30] "No sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:27:58 crc kubenswrapper[4703]: I0202 13:27:58.285382 4703 kubelet.go:2428] "SyncLoop UPDATE" source="api" pods=["openshift-marketplace/community-operators-l7xlr"] Feb 02 13:27:58 crc kubenswrapper[4703]: I0202 13:27:58.375828 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerStarted","Data":"7f3f66ae033fba95e2006284a39af75b98f0ba4a9372b67e22408cb85d280860"} Feb 02 13:27:59 crc kubenswrapper[4703]: I0202 13:27:59.384459 4703 generic.go:334] "Generic (PLEG): container finished" podID="5dde4d09-91cf-4235-91ed-d6593762fb77" containerID="900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d" exitCode=0 Feb 02 13:27:59 crc kubenswrapper[4703]: I0202 13:27:59.384545 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerDied","Data":"900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d"} Feb 02 13:28:00 crc kubenswrapper[4703]: I0202 13:28:00.393083 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerStarted","Data":"475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6"} Feb 02 13:28:01 crc kubenswrapper[4703]: I0202 13:28:01.401981 4703 generic.go:334] "Generic (PLEG): container finished" podID="5dde4d09-91cf-4235-91ed-d6593762fb77" containerID="475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6" exitCode=0 Feb 02 13:28:01 crc kubenswrapper[4703]: I0202 13:28:01.402024 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerDied","Data":"475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6"} Feb 02 13:28:02 crc kubenswrapper[4703]: I0202 13:28:02.410771 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" 
event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerStarted","Data":"13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec"} Feb 02 13:28:02 crc kubenswrapper[4703]: I0202 13:28:02.436086 4703 pod_startup_latency_tracker.go:104] "Observed pod startup duration" pod="openshift-marketplace/community-operators-l7xlr" podStartSLOduration=2.823264153 podStartE2EDuration="5.436063826s" podCreationTimestamp="2026-02-02 13:27:57 +0000 UTC" firstStartedPulling="2026-02-02 13:27:59.386634159 +0000 UTC m=+2206.401841693" lastFinishedPulling="2026-02-02 13:28:01.999433832 +0000 UTC m=+2209.014641366" observedRunningTime="2026-02-02 13:28:02.431569747 +0000 UTC m=+2209.446777271" watchObservedRunningTime="2026-02-02 13:28:02.436063826 +0000 UTC m=+2209.451271370" Feb 02 13:28:08 crc kubenswrapper[4703]: I0202 13:28:08.055883 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:28:08 crc kubenswrapper[4703]: I0202 13:28:08.056540 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="unhealthy" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:28:08 crc kubenswrapper[4703]: I0202 13:28:08.110095 4703 kubelet.go:2542] "SyncLoop (probe)" probe="startup" status="started" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:28:08 crc kubenswrapper[4703]: I0202 13:28:08.477712 4703 kubelet.go:2542] "SyncLoop (probe)" probe="readiness" status="ready" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:28:08 crc kubenswrapper[4703]: I0202 13:28:08.520081 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l7xlr"] Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.452110 4703 kuberuntime_container.go:808] "Killing container with a grace period" pod="openshift-marketplace/community-operators-l7xlr" podUID="5dde4d09-91cf-4235-91ed-d6593762fb77" containerName="registry-server" containerID="cri-o://13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec" gracePeriod=2 Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.785028 4703 util.go:48] "No ready sandbox for pod can be found. 
Need to start a new one" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.848787 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-utilities\") pod \"5dde4d09-91cf-4235-91ed-d6593762fb77\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.849155 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-catalog-content\") pod \"5dde4d09-91cf-4235-91ed-d6593762fb77\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.849288 4703 reconciler_common.go:159] "operationExecutor.UnmountVolume started for volume \"kube-api-access-kqpc7\" (UniqueName: \"kubernetes.io/projected/5dde4d09-91cf-4235-91ed-d6593762fb77-kube-api-access-kqpc7\") pod \"5dde4d09-91cf-4235-91ed-d6593762fb77\" (UID: \"5dde4d09-91cf-4235-91ed-d6593762fb77\") " Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.849968 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-utilities" (OuterVolumeSpecName: "utilities") pod "5dde4d09-91cf-4235-91ed-d6593762fb77" (UID: "5dde4d09-91cf-4235-91ed-d6593762fb77"). InnerVolumeSpecName "utilities". PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.854639 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/projected/5dde4d09-91cf-4235-91ed-d6593762fb77-kube-api-access-kqpc7" (OuterVolumeSpecName: "kube-api-access-kqpc7") pod "5dde4d09-91cf-4235-91ed-d6593762fb77" (UID: "5dde4d09-91cf-4235-91ed-d6593762fb77"). InnerVolumeSpecName "kube-api-access-kqpc7". PluginName "kubernetes.io/projected", VolumeGidValue "" Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.905621 4703 operation_generator.go:803] UnmountVolume.TearDown succeeded for volume "kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-catalog-content" (OuterVolumeSpecName: "catalog-content") pod "5dde4d09-91cf-4235-91ed-d6593762fb77" (UID: "5dde4d09-91cf-4235-91ed-d6593762fb77"). InnerVolumeSpecName "catalog-content". 
PluginName "kubernetes.io/empty-dir", VolumeGidValue "" Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.951244 4703 reconciler_common.go:293] "Volume detached for volume \"utilities\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-utilities\") on node \"crc\" DevicePath \"\"" Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.951328 4703 reconciler_common.go:293] "Volume detached for volume \"catalog-content\" (UniqueName: \"kubernetes.io/empty-dir/5dde4d09-91cf-4235-91ed-d6593762fb77-catalog-content\") on node \"crc\" DevicePath \"\"" Feb 02 13:28:10 crc kubenswrapper[4703]: I0202 13:28:10.951347 4703 reconciler_common.go:293] "Volume detached for volume \"kube-api-access-kqpc7\" (UniqueName: \"kubernetes.io/projected/5dde4d09-91cf-4235-91ed-d6593762fb77-kube-api-access-kqpc7\") on node \"crc\" DevicePath \"\"" Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.460319 4703 generic.go:334] "Generic (PLEG): container finished" podID="5dde4d09-91cf-4235-91ed-d6593762fb77" containerID="13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec" exitCode=0 Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.460400 4703 util.go:48] "No ready sandbox for pod can be found. Need to start a new one" pod="openshift-marketplace/community-operators-l7xlr" Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.460395 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerDied","Data":"13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec"} Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.460778 4703 kubelet.go:2453] "SyncLoop (PLEG): event for pod" pod="openshift-marketplace/community-operators-l7xlr" event={"ID":"5dde4d09-91cf-4235-91ed-d6593762fb77","Type":"ContainerDied","Data":"7f3f66ae033fba95e2006284a39af75b98f0ba4a9372b67e22408cb85d280860"} Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.460804 4703 scope.go:117] "RemoveContainer" containerID="13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec" Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.479683 4703 scope.go:117] "RemoveContainer" containerID="475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6" Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.500350 4703 kubelet.go:2437] "SyncLoop DELETE" source="api" pods=["openshift-marketplace/community-operators-l7xlr"] Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.500408 4703 kubelet.go:2431] "SyncLoop REMOVE" source="api" pods=["openshift-marketplace/community-operators-l7xlr"] Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.518424 4703 scope.go:117] "RemoveContainer" containerID="900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d" Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.532921 4703 scope.go:117] "RemoveContainer" containerID="13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec" Feb 02 13:28:11 crc kubenswrapper[4703]: E0202 13:28:11.533294 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec\": container with ID starting with 13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec not found: ID does not exist" containerID="13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec" Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.533335 
Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.533335 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec"} err="failed to get container status \"13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec\": rpc error: code = NotFound desc = could not find container \"13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec\": container with ID starting with 13ee261bf71bde1fc15ffa8bc32ca84497257f4db4442b9dced81a0e44a8e9ec not found: ID does not exist"
Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.533354 4703 scope.go:117] "RemoveContainer" containerID="475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6"
Feb 02 13:28:11 crc kubenswrapper[4703]: E0202 13:28:11.533589 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6\": container with ID starting with 475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6 not found: ID does not exist" containerID="475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6"
Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.533615 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6"} err="failed to get container status \"475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6\": rpc error: code = NotFound desc = could not find container \"475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6\": container with ID starting with 475dea64d6fdfb687612f97a8e5c8d9e4efc39b3b069458c9f147e6401421bf6 not found: ID does not exist"
Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.533636 4703 scope.go:117] "RemoveContainer" containerID="900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d"
Feb 02 13:28:11 crc kubenswrapper[4703]: E0202 13:28:11.533862 4703 log.go:32] "ContainerStatus from runtime service failed" err="rpc error: code = NotFound desc = could not find container \"900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d\": container with ID starting with 900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d not found: ID does not exist" containerID="900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d"
Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.533888 4703 pod_container_deletor.go:53] "DeleteContainer returned error" containerID={"Type":"cri-o","ID":"900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d"} err="failed to get container status \"900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d\": rpc error: code = NotFound desc = could not find container \"900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d\": container with ID starting with 900fe638f51ed85dbed9fed9bc85699d3a8925bf605fcc7a9bfc5e0ce668068d not found: ID does not exist"
Feb 02 13:28:11 crc kubenswrapper[4703]: I0202 13:28:11.941627 4703 kubelet_volumes.go:163] "Cleaned up orphaned pod volumes dir" podUID="5dde4d09-91cf-4235-91ed-d6593762fb77" path="/var/lib/kubelet/pods/5dde4d09-91cf-4235-91ed-d6593762fb77/volumes"
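Each journal line above wraps a klog header (severity letter plus date, time, PID, and file:line). A small self-contained Go filter, not part of the kubelet, that extracts those fields when triaging an artifact like this log:

package main

import (
	"bufio"
	"fmt"
	"os"
	"regexp"
)

// Matches the klog header embedded in each journal line, e.g.
// "I0202 13:27:17.940470 4703 kubelet_volumes.go:163] ...".
var klogRe = regexp.MustCompile(`([IWEF])(\d{4}) (\d{2}:\d{2}:\d{2}\.\d+)\s+(\d+) ([\w.]+:\d+)\] (.*)`)

func main() {
	sc := bufio.NewScanner(os.Stdin)
	sc.Buffer(make([]byte, 0, 1024*1024), 1024*1024) // journal lines can be long
	for sc.Scan() {
		if m := klogRe.FindStringSubmatch(sc.Text()); m != nil {
			fmt.Printf("sev=%s time=%s at=%s msg=%.80s\n", m[1], m[3], m[5], m[6])
		}
	}
}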